From: <sha...@us...> - 2012-03-12 02:14:15
Revision: 3607
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3607&view=rev
Author: shadowtm
Date: 2012-03-12 02:14:08 +0000 (Mon, 12 Mar 2012)

Log Message:
-----------
Added OWLOntologyKnowledgeSource to support abstraction of the ontology creation so that we can introduce different implementations more easily. In particular, this will come in handy for the REST service interface.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java
    trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java

Added Paths:
-----------
    trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java

Modified: trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java	2012-03-09 09:19:31 UTC (rev 3606)
+++ trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java	2012-03-12 02:14:08 UTC (rev 3607)
@@ -36,61 +36,67 @@
 import org.dllearner.parser.KBParser;
 import org.dllearner.parser.ParseException;
 import org.dllearner.reasoning.DIGConverter;
-import org.springframework.beans.propertyeditors.StringTrimmerEditor;
+import org.dllearner.utilities.owl.OWLAPIAxiomConvertVisitor;
+import org.semanticweb.owlapi.apibinding.OWLManager;
+import org.semanticweb.owlapi.model.IRI;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.model.OWLOntologyCreationException;
+import org.semanticweb.owlapi.model.OWLOntologyManager;
 
 /**
  * KB files are an internal convenience format used in DL-Learner. Their
  * syntax is close to Description Logics and easy to use. KB files can be
  * exported to OWL for usage outside of DL-Learner.
- * 
+ *
  * @author Jens Lehmann
- * 
  */
 @ComponentAnn(name = "KB File", shortName = "kbfile", version = 0.8)
-public class KBFile extends AbstractKnowledgeSource {
+public class KBFile extends AbstractKnowledgeSource implements OWLOntologyKnowledgeSource {
 
-    private static Logger logger = Logger.getLogger(KBFile.class);
-
-    private KB kb;
+    private static Logger logger = Logger.getLogger(KBFile.class);
 
-    @ConfigOption(name = "url", description = "URL pointer to the KB file")
+    private KB kb;
+
+    @ConfigOption(name = "url", description = "URL pointer to the KB file")
     private String url;
 
     private String baseDir;
 
     @ConfigOption(name = "fileName", description = "relative or absolute path to KB file")
     private String fileName;
 
-    /**
-     * Default constructor (needed for reflection in ComponentManager).
-     */
-    public KBFile() {
-    }
-
-    /**
-     * Constructor allowing you to treat an already existing KB object
-     * as a KBFile knowledge source. Use it sparingly, because the
-     * standard way to create components is via
-     * {@link org.dllearner.core.ComponentManager}.
-     *
-     * @param kb A KB object.
-     */
-    public KBFile(KB kb) {
-        this.kb = kb;
-    }
-
-    public static Collection<org.dllearner.core.options.ConfigOption<?>> createConfigOptions() {
-        Collection<org.dllearner.core.options.ConfigOption<?>> options = new LinkedList<org.dllearner.core.options.ConfigOption<?>>();
+    private OWLOntology owlOntology;
+
+    /**
+     * Default constructor (needed for reflection in ComponentManager).
+     */
+    public KBFile() {
+    }
+
+    /**
+     * Constructor allowing you to treat an already existing KB object
+     * as a KBFile knowledge source. Use it sparingly, because the
+     * standard way to create components is via
+     * {@link org.dllearner.core.ComponentManager}.
+     *
+     * @param kb A KB object.
+     */
+    public KBFile(KB kb) {
+        this.kb = kb;
+    }
+
+    public static Collection<org.dllearner.core.options.ConfigOption<?>> createConfigOptions() {
+        Collection<org.dllearner.core.options.ConfigOption<?>> options = new LinkedList<org.dllearner.core.options.ConfigOption<?>>();
 //      options.add(new StringConfigOption("filename", "pointer to the KB file on local file system",null, true, true));
-        URLConfigOption urlOption = new URLConfigOption("url", "URL pointer to the KB file",null, false, true);
-        urlOption.setRefersToFile(true);
-        options.add(urlOption);
-        return options;
-    }
-
-    public static String getName() {
-        return "KB file";
-    }
-
+        URLConfigOption urlOption = new URLConfigOption("url", "URL pointer to the KB file", null, false, true);
+        urlOption.setRefersToFile(true);
+        options.add(urlOption);
+        return options;
+    }
+
+    public static String getName() {
+        return "KB file";
+    }
+
     @Override
     public void init() throws ComponentInitException {
         try {
@@ -106,6 +112,7 @@
                 kb = KBParser.parseKBFile(f);
             }
 
+            owlOntology = createOWLOntology(kb);
             logger.trace("KB File " + getUrl() + " parsed successfully.");
         } else {
             throw new ComponentInitException("No URL option or kb object given. Cannot initialise KBFile component.");
@@ -113,46 +120,69 @@
 
         } catch (ParseException e) {
             throw new ComponentInitException("KB file " + getUrl() + " could not be parsed correctly.", e);
-        }catch (FileNotFoundException e) {
+        } catch (FileNotFoundException e) {
             throw new ComponentInitException("KB file " + getUrl() + " could not be found.", e);
         } catch (URISyntaxException e) {
-            throw new ComponentInitException("KB file " + getUrl() + " could not be found.", e);
-        }
+            throw new ComponentInitException("KB file " + getUrl() + " could not be found.", e);
+        }
     }
 
-    /*
-     * (non-Javadoc)
-     * 
-     * @see org.dllearner.core.KnowledgeSource#toDIG()
-     */
-    @Override
-    public String toDIG(URI kbURI) {
-        return DIGConverter.getDIGString(kb, kbURI).toString();
-    }
-
-    @Override
-    public String toString() {
-        if(kb==null)
-            return "KB file (not initialised)";
-        else
-            return kb.toString();
-    }
-
-    @Override
-    public void export(File file, org.dllearner.core.OntologyFormat format){
-        kb.export(file, format);
-    }
-
-    public String getUrl() {
-        return url;
-    }
+    /**
+     * Create the OWL Ontology.
+     *
+     * @param kb The kb to create the ontology on top of.
+     * @return The OWL Ontology
+     */
+    private OWLOntology createOWLOntology(KB kb) {
+        //This call is potentially dangerous in a multi-threaded (web) environment - I believe it returns a singleton instance
+        // There are ways around this, but getting it to work single threaded first.
+        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
+        IRI ontologyURI = IRI.create("http://example.com");
+        OWLOntology ontology;
+        try {
+            ontology = manager.createOntology(ontologyURI);
+            OWLAPIAxiomConvertVisitor.fillOWLOntology(manager, ontology, kb);
 
-    @Override
-    public KB toKB() {
-        return kb;
-    }
+        } catch (OWLOntologyCreationException e) {
+            throw new RuntimeException(e);
+        }
+        return ontology;
+    }
 
+    /*
+     * (non-Javadoc)
+     *
+     * @see org.dllearner.core.KnowledgeSource#toDIG()
+     */
+    @Override
+    public String toDIG(URI kbURI) {
+        return DIGConverter.getDIGString(kb, kbURI).toString();
+    }
+
+    @Override
+    public String toString() {
+        if (kb == null)
+            return "KB file (not initialised)";
+        else
+            return kb.toString();
+    }
+
+    @Override
+    public void export(File file, org.dllearner.core.OntologyFormat format) {
+        kb.export(file, format);
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    @Override
+    public KB toKB() {
+        return kb;
+    }
+
     public void setUrl(String url) {
         this.url = url;
     }
@@ -165,11 +195,16 @@
         this.baseDir = baseDir;
     }
 
-    public String getFileName() {
-        return fileName;
-    }
+    public String getFileName() {
+        return fileName;
+    }
 
-    public void setFileName(String fileName) {
-        this.fileName = fileName;
-    }
+    public void setFileName(String fileName) {
+        this.fileName = fileName;
+    }
+
+    @Override
+    public OWLOntology getOWLOntology() {
+        return owlOntology;
+    }
 }

Added: trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java	(rev 0)
+++ trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java	2012-03-12 02:14:08 UTC (rev 3607)
@@ -0,0 +1,22 @@
+package org.dllearner.kb;
+
+import org.semanticweb.owlapi.model.OWLOntology;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: Chris Shellenbarger
+ * Date: 3/11/12
+ * Time: 6:36 PM
+ *
+ * This interface represents objects which can return an OWLOntology representation of itself.
+ */
+public interface OWLOntologyKnowledgeSource {
+
+    /**
+     * Get the OWL Ontology that this object represents.
+     *
+     * @return The OWL ontology that this object represents.
+     */
+    public OWLOntology getOWLOntology();
+}

Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java	2012-03-09 09:19:31 UTC (rev 3606)
+++ trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java	2012-03-12 02:14:08 UTC (rev 3607)
@@ -30,6 +30,7 @@
 import org.dllearner.core.owl.*;
 import org.dllearner.kb.OWLAPIOntology;
 import org.dllearner.kb.OWLFile;
+import org.dllearner.kb.OWLOntologyKnowledgeSource;
 import org.dllearner.kb.sparql.SparqlKnowledgeSource;
 import org.dllearner.utilities.owl.*;
 import org.semanticweb.HermiT.Reasoner.ReasonerFactory;
@@ -209,17 +210,25 @@
             // all other sources are converted to KB and then to an
             // OWL API ontology
             } else {
+
+                //KB Files
                 KB kb = source.toKB();
+
+                if(source instanceof OWLOntologyKnowledgeSource){
+                    ontology = ((OWLOntologyKnowledgeSource) source).getOWLOntology();
+                } else {
+
 //              System.out.println(kb.toString(null,null));
-                IRI ontologyURI = IRI.create("http://example.com");
-                ontology = null;
-                try {
-                    ontology = manager.createOntology(ontologyURI);
-                } catch (OWLOntologyCreationException e) {
-                    throw new RuntimeException(e);
+                    IRI ontologyURI = IRI.create("http://example.com");
+                    ontology = null;
+                    try {
+                        ontology = manager.createOntology(ontologyURI);
+                    } catch (OWLOntologyCreationException e) {
+                        throw new RuntimeException(e);
+                    }
+                    OWLAPIAxiomConvertVisitor.fillOWLOntology(manager, ontology, kb);
                 }
-                OWLAPIAxiomConvertVisitor.fillOWLOntology(manager, ontology, kb);
                 owlAPIOntologies.add(ontology);
                 allImports.add(ontology);
                 atomicConcepts.addAll(kb.findAllAtomicConcepts());
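The commit boils down to a capability interface that a knowledge source can opt into. As a quick orientation, here is a minimal consumer sketch; it assumes only the org.dllearner.core.KnowledgeSource base type referenced in the Javadoc above, and the class name OntologyLoaderSketch and its fallback branch are illustrative, not part of the commit:

    import org.dllearner.core.KnowledgeSource;
    import org.dllearner.kb.OWLOntologyKnowledgeSource;
    import org.semanticweb.owlapi.model.OWLOntology;

    public class OntologyLoaderSketch {

        // Prefer the direct OWLOntology view when the source provides one;
        // otherwise a caller would fall back to the KB-conversion path shown
        // in the OWLAPIReasoner hunk above.
        public OWLOntology toOntology(KnowledgeSource source) {
            if (source instanceof OWLOntologyKnowledgeSource) {
                return ((OWLOntologyKnowledgeSource) source).getOWLOntology();
            }
            throw new IllegalArgumentException(
                    "source does not expose an OWLOntology; convert its KB instead");
        }
    }

The instanceof dispatch keeps existing knowledge sources working unchanged while letting new implementations (such as the planned REST service) build their ontology however they like.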
From: <lor...@us...> - 2012-03-09 09:19:42
Revision: 3606
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3606&view=rev
Author: lorenz_b
Date: 2012-03-09 09:19:31 +0000 (Fri, 09 Mar 2012)

Log Message:
-----------
Updated JENA libs.

Modified Paths:
--------------
    trunk/components-core/pom.xml
    trunk/pom.xml

Modified: trunk/components-core/pom.xml
===================================================================
--- trunk/components-core/pom.xml	2012-03-07 14:50:29 UTC (rev 3605)
+++ trunk/components-core/pom.xml	2012-03-09 09:19:31 UTC (rev 3606)
@@ -102,6 +102,10 @@
           <artifactId>arq</artifactId>
           <groupId>com.hp.hpl.jena</groupId>
         </exclusion>
+        <exclusion>
+          <artifactId>xercesImpl</artifactId>
+          <groupId>xerces</groupId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -131,6 +135,12 @@
     <dependency>
       <groupId>org.apache.jena</groupId>
       <artifactId>jena-arq</artifactId>
+      <exclusions>
+        <exclusion>
+          <artifactId>xercesImpl</artifactId>
+          <groupId>xerces</groupId>
+        </exclusion>
+      </exclusions>
     </dependency>
 
     <!--JSON is in Central -->
@@ -178,6 +188,12 @@
     <dependency>
       <groupId>org.apache.jena</groupId>
       <artifactId>jena-core</artifactId>
+      <exclusions>
+        <exclusion>
+          <artifactId>xercesImpl</artifactId>
+          <groupId>xerces</groupId>
+        </exclusion>
+      </exclusions>
     </dependency>
 
     <dependency>
@@ -236,7 +252,6 @@
     <dependency>
       <groupId>org.apache.lucene</groupId>
       <artifactId>lucene-core</artifactId>
-      <version>3.5.0</version>
     </dependency>
   </dependencies>
 </project>

Modified: trunk/pom.xml
===================================================================
--- trunk/pom.xml	2012-03-07 14:50:29 UTC (rev 3605)
+++ trunk/pom.xml	2012-03-09 09:19:31 UTC (rev 3606)
@@ -164,7 +164,7 @@
     <dependency>
       <groupId>org.apache.jena</groupId>
       <artifactId>jena-core</artifactId>
-      <version>2.7.0-incubating</version>
+      <version>2.7.1-incubating-SNAPSHOT</version>
     </dependency>
     <!--SwingX is in central -->
     <dependency>
@@ -197,7 +197,7 @@
     <dependency>
      <groupId>org.apache.lucene</groupId>
       <artifactId>lucene-core</artifactId>
-      <version>2.9.3</version>
+      <version>3.5.0</version>
     </dependency>
 
     <dependency>
@@ -224,7 +224,7 @@
     <dependency>
       <groupId>org.apache.jena</groupId>
       <artifactId>jena-arq</artifactId>
-      <version>2.9.0-incubating</version>
+      <version>2.9.1-incubating-SNAPSHOT</version>
     </dependency>
 
     <!--Junits -->
@@ -306,7 +306,7 @@
     <dependency>
       <groupId>org.aksw.commons</groupId>
       <artifactId>model</artifactId>
-      <version>${aksw.version}</version>
+      <version>0.2-SNAPSHOT</version>
     </dependency>
     <dependency>
       <groupId>org.aksw.commons</groupId>
@@ -317,7 +317,7 @@
     <dependency>
       <groupId>org.aksw.commons</groupId>
       <artifactId>util</artifactId>
-      <version>${aksw.version}</version>
+      <version>0.2-SNAPSHOT</version>
     </dependency>
 
@@ -421,7 +421,7 @@
     <dependency>
       <groupId>org.springframework</groupId>
       <artifactId>spring-context</artifactId>
-      <version>3.0.5.RELEASE</version>
+      <version>3.1.1.RELEASE</version>
       <exclusions>
         <exclusion>
           <groupId>commons-logging</groupId>
@@ -432,7 +432,7 @@
     <dependency>
       <groupId>org.springframework</groupId>
       <artifactId>spring-beans</artifactId>
-      <version>3.0.5.RELEASE</version>
+      <version>3.1.1.RELEASE</version>
     </dependency>
 
     <!--BEGIN Logging Dependencies-->
@@ -541,13 +541,24 @@
       <name>Semanticscience projects</name>
       <url>http://s1.semanticscience.org:8080/nexus/content/groups/public/</url>
     </repository>
+
     <repository>
-      <id>apache-repo-releases</id>
-      <url>https://repository.apache.org/content/repositories/releases/</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
+      <id>apache-repo-releases</id>
+      <url>https://repository.apache.org/content/repositories/releases/</url>
+      <releases>
+        <enabled>true</enabled>
+      </releases>
     </repository>
+    <repository>
+      <id>apache-repo-snapshots</id>
+      <url>https://repository.apache.org/content/repositories/snapshots/</url>
+      <releases>
+        <enabled>false</enabled>
+      </releases>
+      <snapshots>
+        <enabled>true</enabled>
+      </snapshots>
+    </repository>
   </repositories>
 
   <pluginRepositories>
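A note for anyone applying the same fix locally: the xercesImpl exclusions only help if no other dependency re-introduces Xerces. A generic way to check (standard Maven tooling, not something from this commit) is to run `mvn dependency:tree -Dincludes=xerces:xercesImpl` inside trunk/components-core; an empty match means the exclusions took effect.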
From: <seb...@us...> - 2012-03-07 14:50:39
Revision: 3605
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3605&view=rev
Author: sebastianwtr
Date: 2012-03-07 14:50:29 +0000 (Wed, 07 Mar 2012)

Log Message:
-----------
[tbsl_exploration] first step reorganizing the project

Modified Paths:
--------------
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java

Added Paths:
-----------
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/ElementList_new.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/GetRessourcePropertys.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/Levenshtein.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/SparqlFilter.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/testClass_new.java

Removed Paths:
-------------
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlFilter.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/Levenshtein.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/Parsing.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/test_vergleich.java
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/sax/

Copied: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java (from rev 3595, trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java)
===================================================================
--- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java	(rev 0)
+++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java	2012-03-07 14:50:29 UTC (rev 3605)
@@ -0,0 +1,56 @@
+package org.dllearner.algorithm.tbsl.exploration.Index;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+
+
+public class Index_utils {
+
+    /**
+     *
+     * @param string
+     * @param fall 1=Property, 0=Resource, 2=OntologyClass/Yago, 3=Resource+Yago+OntologyClass
+     * @return ArrayList with possible URIs gotten from the Index
+     * @throws SQLException
+     */
+    public static ArrayList<String> searchIndex(String string, int fall, SQLiteIndex myindex) throws SQLException{
+
+        String originalString=string;
+        string=string.replace("_", " ");
+        string=string.replace("-", " ");
+        string=string.replace(".", " ");
+        String result=null;
+        String tmp1=null;
+        String tmp2 = null;
+        ArrayList<String> result_List = new ArrayList<String>();
+
+        if(fall==0 || fall==3){
+
+            result=myindex.getResourceURI(string.toLowerCase());
+            result_List.add(result);
+
+        }
+        if(fall==2||fall==3){
+
+            tmp1=myindex.getontologyClassURI(string.toLowerCase());
+            tmp2=myindex.getYagoURI(string.toLowerCase());
+            if(tmp1!=null) result_List.add(tmp1);
+            if(tmp2!=null) result_List.add(tmp2);
+            //result_List.add("www.TEST.de");
+        }
+
+
+        if(fall==1){
+            tmp1=myindex.getPropertyURI(string.toLowerCase());
+            tmp2=myindex.getontologyURI(string.toLowerCase());
+            if(tmp1!=null) result_List.add(tmp1);
+            if(tmp2!=null) result_List.add(tmp2);
+
+        }
+
+        return result_List;
+    }
+
+
+
+}
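To make the fall codes concrete, here is a hypothetical caller; the lookup strings are invented for illustration, and SQLiteIndex is the in-memory index introduced below (its constructor loads data files from fixed paths, so this only runs where those files exist):

    import java.sql.SQLException;
    import java.util.ArrayList;

    import org.dllearner.algorithm.tbsl.exploration.Index.Index_utils;
    import org.dllearner.algorithm.tbsl.exploration.Index.SQLiteIndex;

    public class IndexLookupSketch {
        public static void main(String[] args) throws ClassNotFoundException, SQLException {
            // builds all in-memory lookup tables from the data files (see SQLiteIndex below)
            SQLiteIndex myindex = new SQLiteIndex();

            // fall==3: resolve "berlin" against resources, Yago classes and ontology classes at once
            ArrayList<String> resourceUris = Index_utils.searchIndex("berlin", 3, myindex);

            // fall==1: resolve a relation word against properties and the ontology namespace
            ArrayList<String> propertyUris = Index_utils.searchIndex("birth place", 1, myindex);

            System.out.println(resourceUris);
            System.out.println(propertyUris);
        }
    }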
Copied: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java (from rev 3594, trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java)
===================================================================
--- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java	(rev 0)
+++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java	2012-03-07 14:50:29 UTC (rev 3605)
@@ -0,0 +1,594 @@
+package org.dllearner.algorithm.tbsl.exploration.Index;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+
+import org.dllearner.algorithm.tbsl.nlp.StanfordLemmatizer;
+
+public class SQLiteIndex {
+    private Connection conn;
+    StanfordLemmatizer lemma;
+
+    public SQLiteIndex() throws ClassNotFoundException, SQLException {
+        // TODO Auto-generated constructor stub
+        Class.forName( "org.sqlite.JDBC" );
+        conn = DriverManager.getConnection("jdbc:sqlite::memory:");
+        createIndexPropertys();
+        createIndexResource();
+        createWordnetHelp();
+        createIndexOntology();
+        createIndexOntologyClass();
+        createIndexofYago();
+        lemma = new StanfordLemmatizer();
+
+        //optional!!
+        //createIndexWikipedia();
+
+    }
+
+    /*
+     * Next, we want to select the persons living in a city that contains the pattern "tav" from the "Persons" table.
+     *
+     * We use the following SELECT statement:
+     * SELECT * FROM Persons
+     * WHERE City LIKE '%tav%'
+     */
+
+    public String getResourceURI(String string) throws SQLException{
+        /* while(rs.next())
+        {*/
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        try {
+            rs = stat.executeQuery("select uri from resource where name='"+string.toLowerCase()+"';");
+            /*while(rs.next()){
+                System.out.println("Next: "+rs.getString("uri"));
+            }*/
+            return rs.getString("uri");
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public ArrayList<String> getResourceURILike(String string) throws SQLException{
+        /* while(rs.next())
+        {*/
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        ArrayList<String> result= new ArrayList<String>();
+        try {
+            rs = stat.executeQuery("select uri from resource where name like'"+string.toLowerCase()+"%';");
+            while(rs.next()){
+                System.out.println("Next: "+rs.getString("uri"));
+                result.add(rs.getString("uri"));
+            }
+            return result;
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public ArrayList<String> getYagoURILike(String string) throws SQLException{
+        /* while(rs.next())
+        {*/
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        ArrayList<String> result= new ArrayList<String>();
+        try {
+            rs = stat.executeQuery("select uri from yago where name like'"+string.toLowerCase()+"%';");
+            while(rs.next()){
+                System.out.println("Next: "+rs.getString("uri"));
+                result.add(rs.getString("uri"));
+            }
+            return result;
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public String getYagoURI(String string) throws SQLException{
+        /* while(rs.next())
+        {*/
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        try {
+            rs = stat.executeQuery("select uri from yago where name='"+string.toLowerCase()+"';");
+            return rs.getString("uri");
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public String getPropertyURI(String string) throws SQLException{
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        try {
+            rs = stat.executeQuery("select uri from property where name='"+string.toLowerCase()+"';");
+            return rs.getString("uri");
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public String getontologyURI(String string) throws SQLException{
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        try {
+            rs = stat.executeQuery("select uri from ontology where name='"+string.toLowerCase()+"';");
+            return rs.getString("uri");
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public String getontologyClassURI(String string) throws SQLException{
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        try {
+            rs = stat.executeQuery("select uri from ontologyClass where name='"+string.toLowerCase()+"';");
+            return rs.getString("uri");
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public ArrayList<String> getontologyClassURILike(String string) throws SQLException{
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        ArrayList<String> result= new ArrayList<String>();
+        try {
+            rs = stat.executeQuery("select uri from ontologyClass where name like'"+string.toLowerCase()+"%';");
+            while(rs.next()){
+                System.out.println("Next: "+rs.getString("uri"));
+                result.add(rs.getString("uri"));
+            }
+            return result;
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public String getWikipediaURI(String string) throws SQLException{
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        try {
+            rs = stat.executeQuery("select uri from wikiindex where name='"+string.toLowerCase()+"';");
+            return rs.getString("uri");
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    public String getWordnetHelp(String string) throws SQLException{
+        Statement stat = conn.createStatement();
+        ResultSet rs;
+        try {
+            rs = stat.executeQuery("select singular from wordnet where plural='"+string.toLowerCase()+"';");
+            return rs.getString("singular");
+        } catch (Exception e) {
+            // TODO Auto-generated catch block
+            //e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    private void createWordnetHelp() throws SQLException{
+        /*System.out.println("Start SQL test");
+        Class.forName( "org.sqlite.JDBC" );
+        conn = DriverManager.getConnection("jdbc:sqlite::memory:");*/
+        System.out.println("start generating Wordnet Help-Function");
+        Statement stat = conn.createStatement();
+        stat.executeUpdate("drop table if exists wordnet;");
+        stat.executeUpdate("create table wordnet (plural, singular);");
+        PreparedStatement prep = conn.prepareStatement("insert into wordnet values (?, ?);");
+        BufferedReader in=null;
+        // conn.setAutoCommit(false);
+        int zaehler=0;
+        try {
+            in = new BufferedReader(
+                    new InputStreamReader(
+                            new FileInputStream( "/home/swalter/workspace/noun.exc" ) ) );
+            String s;
+            while( null != (s = in.readLine()) ) {
+                String[] tmp_array =s.split(" ");
+                if(tmp_array.length>=2){
+                    prep.setString(1, tmp_array[0]);
+                    prep.setString(2, tmp_array[1]);
+                    String temp="";
+                    if(tmp_array.length>2){
+                        for(int i =1;i<tmp_array.length;i++){
+                            temp=temp+tmp_array[i]+" ";
+                        }
+                        prep.setString(2, temp);
+                    }
+                    prep.addBatch();
+                    zaehler=zaehler+1;
+                    //if(zaehler%10000==0) System.out.println(zaehler);
+                    if(zaehler%10000==0){
+                        conn.setAutoCommit(false);
+                        prep.executeBatch();
+                        conn.setAutoCommit(false);
+                        System.out.println("done");
+                    }
+
+                }
+            }
+        } catch( FileNotFoundException ex ) {
+        } catch( Exception ex ) {
+            System.out.println( ex );
+        } finally {
+            if( in != null )
+                try {
+                    in.close();
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+        }
+
+        conn.setAutoCommit(false);
+        prep.executeBatch();
+        conn.setAutoCommit(true);
+        System.out.println("Done");
+
+    }
+
+    private void createIndexWikipedia() throws ClassNotFoundException, SQLException{
+        /*System.out.println("Start SQL test");
+        Class.forName( "org.sqlite.JDBC" );
+        conn = DriverManager.getConnection("jdbc:sqlite::memory:");*/
+        Statement stat = conn.createStatement();
+        stat.executeUpdate("drop table if exists wikiindex;");
+        stat.executeUpdate("create table wikiindex (name, uri);");
+        PreparedStatement prep = conn.prepareStatement("insert into wikiindex values (?, ?);");
+        BufferedReader in=null;
+        // conn.setAutoCommit(false);
+        int zaehler=0;
+        try {
+            in = new BufferedReader(
+                    new InputStreamReader(
+                            new FileInputStream( "/home/swalter/workspace/URIsFromWikipedia" ) ) );
+            String s;
+            while( null != (s = in.readLine()) ) {
+                String[] tmp_array =s.split("::");
+                if(tmp_array.length>=2){
+                    prep.setString(1, tmp_array[0]);
+                    prep.setString(2, tmp_array[1]);
+                    prep.addBatch();
+                    zaehler=zaehler+1;
+                    //if(zaehler%100000==0) System.out.println(zaehler);
+                    if(zaehler%1000000==0){
+                        conn.setAutoCommit(false);
+                        prep.executeBatch();
+                        conn.setAutoCommit(false);
+                        System.out.println("done");
+                    }
+
+                }
+            }
+        } catch( FileNotFoundException ex ) {
+        } catch( Exception ex ) {
+            System.out.println( ex );
+        } finally {
+            if( in != null )
+                try {
+                    in.close();
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+        }
+
+        conn.setAutoCommit(false);
+        prep.executeBatch();
+        conn.setAutoCommit(true);
+        System.out.println("Done");
+    }
+
+    private void createIndexPropertys() throws ClassNotFoundException, SQLException{
+        System.out.println("start indexing Properties");
+        Statement stat = conn.createStatement();
+        stat.executeUpdate("drop table if exists property;");
+        stat.executeUpdate("create table property (name, uri);");
+        PreparedStatement prep = conn.prepareStatement("insert into property values (?, ?);");
+        BufferedReader in=null;
+        int zaehler=0;
+        try {
+            in = new BufferedReader(
+                    new InputStreamReader(
+                            new FileInputStream( "/home/swalter/workspace/property" ) ) );
+            String s;
+            while( null != (s = in.readLine()) ) {
+                String[] tmp_array =s.split(":::");
+                if(tmp_array.length>=2){
+                    prep.setString(1, tmp_array[0]);
+                    prep.setString(2, tmp_array[1]);
+                    prep.addBatch();
+                    zaehler=zaehler+1;
+                    if(zaehler%1000000==0){
+                        conn.setAutoCommit(false);
+                        prep.executeBatch();
+                        conn.setAutoCommit(false);
+                        // System.out.println(zaehler+" done");
+                    }
+
+                }
+            }
+        } catch( FileNotFoundException ex ) {
+        } catch( Exception ex ) {
+            System.out.println( ex );
+        } finally {
+            if( in != null )
+                try {
+                    in.close();
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+        }
+
+        conn.setAutoCommit(false);
+        prep.executeBatch();
+        conn.setAutoCommit(true);
+        System.out.println("Number of Property: "+zaehler);
+        System.out.println("Done");
+
+    }
+
+    private void createIndexResource() throws ClassNotFoundException, SQLException{
+        System.out.println("start indexing Resources");
+        Statement stat = conn.createStatement();
+        stat.executeUpdate("drop table if exists resource;");
+        stat.executeUpdate("create table resource (name, uri);");
+        PreparedStatement prep = conn.prepareStatement("insert into resource values (?, ?);");
+        BufferedReader in=null;
+        int zaehler=0;
+        try {
+            in = new BufferedReader(
+                    new InputStreamReader(
+                            new FileInputStream( "/home/swalter/workspace/resource" ) ) );
+            String s;
+            while( null != (s = in.readLine()) ) {
+                String[] tmp_array =s.split(":::");
+                if(tmp_array.length>=2){
+                    prep.setString(1, tmp_array[0]);
+                    prep.setString(2, tmp_array[1]);
+                    prep.addBatch();
+                    zaehler=zaehler+1;
+                    if(zaehler%1000000==0){
+                        conn.setAutoCommit(false);
+                        prep.executeBatch();
+                        conn.setAutoCommit(false);
+                    }
+
+                }
+            }
+        } catch( FileNotFoundException ex ) {
+        } catch( Exception ex ) {
+            System.out.println( ex );
+        } finally {
+            if( in != null )
+                try {
+                    in.close();
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+        }
+
+        conn.setAutoCommit(false);
+        prep.executeBatch();
+        conn.setAutoCommit(true);
+        System.out.println("Number of Resources: "+zaehler);
+        System.out.println("Done");
+
+
+
+    }
+
+    private void createIndexOntology() throws ClassNotFoundException, SQLException{
+        /*System.out.println("Start SQL test");*/
+        System.out.println("start indexing Ontology");
+        Statement stat = conn.createStatement();
+        stat.executeUpdate("drop table if exists ontology;");
+        stat.executeUpdate("create table ontology (name, uri);");
+        PreparedStatement prep = conn.prepareStatement("insert into ontology values (?, ?);");
+        BufferedReader in=null;
+        // conn.setAutoCommit(false);
+        int zaehler=0;
+        try {
+            in = new BufferedReader(
+                    new InputStreamReader(
+                            new FileInputStream( "/home/swalter/workspace/ontology" ) ) );
+            String s;
+            while( null != (s = in.readLine()) ) {
+                String[] tmp_array =s.split(":::");
+                if(tmp_array.length>=2){
+                    prep.setString(1, tmp_array[0]);
+                    prep.setString(2, tmp_array[1]);
+                    prep.addBatch();
+                    zaehler=zaehler+1;
+                    // if(zaehler%10000==0) System.out.println(zaehler);
+                    if(zaehler%1000000==0){
+                        conn.setAutoCommit(false);
+                        prep.executeBatch();
+                        conn.setAutoCommit(false);
+                        //System.out.println("done" + zaehler);
+                    }
+
+                }
+            }
+        } catch( FileNotFoundException ex ) {
+        } catch( Exception ex ) {
+            System.out.println( ex );
+        } finally {
+            if( in != null )
+                try {
+                    in.close();
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+        }
+
+        conn.setAutoCommit(false);
+        prep.executeBatch();
+        conn.setAutoCommit(true);
+        System.out.println("Number of Ontologys: "+zaehler);
+        System.out.println("Done");
+
+    }
+
+    private void createIndexOntologyClass() throws ClassNotFoundException, SQLException{
+        /*System.out.println("Start SQL test");*/
+        System.out.println("start indexing ontologyClass");
+        Statement stat = conn.createStatement();
+        stat.executeUpdate("drop table if exists ontologyClass;");
+        stat.executeUpdate("create table ontologyClass (name, uri);");
+        PreparedStatement prep = conn.prepareStatement("insert into ontologyClass values (?, ?);");
+        BufferedReader in=null;
+        // conn.setAutoCommit(false);
+        int zaehler=0;
+        try {
+            in = new BufferedReader(
+                    new InputStreamReader(
+                            new FileInputStream( "/home/swalter/workspace/ontologyClass" ) ) );
+            String s;
+            while( null != (s = in.readLine()) ) {
+                String[] tmp_array =s.split(":::");
+                if(tmp_array.length>=2){
+                    prep.setString(1, tmp_array[0]);
+                    prep.setString(2, tmp_array[1]);
+                    prep.addBatch();
+                    zaehler=zaehler+1;
+                    // if(zaehler%10000==0) System.out.println(zaehler);
+                    if(zaehler%1000000==0){
+                        conn.setAutoCommit(false);
+                        prep.executeBatch();
+                        conn.setAutoCommit(false);
+                        //System.out.println("done" + zaehler);
+                    }
+
+                }
+            }
+        } catch( FileNotFoundException ex ) {
+        } catch( Exception ex ) {
+            System.out.println( ex );
+        } finally {
+            if( in != null )
+                try {
+                    in.close();
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+        }
+
+        conn.setAutoCommit(false);
+        prep.executeBatch();
+        conn.setAutoCommit(true);
+        System.out.println("Number of OntologyClass: "+zaehler);
+        System.out.println("Done");
+
+    }
+
+    private void createIndexofYago() throws ClassNotFoundException, SQLException{
+        /*System.out.println("Start SQL test");*/
+        System.out.println("start indexing yago");
+        Statement stat = conn.createStatement();
+        stat.executeUpdate("drop table if exists yago;");
+        stat.executeUpdate("create table yago (name, uri);");
+        PreparedStatement prep = conn.prepareStatement("insert into yago values (?, ?);");
+        BufferedReader in=null;
+        // conn.setAutoCommit(false);
+        int zaehler=0;
+        try {
+            in = new BufferedReader(
+                    new InputStreamReader(
+                            new FileInputStream( "/home/swalter/workspace/yago" ) ) );
+            String s;
+            while( null != (s = in.readLine()) ) {
+                String[] tmp_array =s.split(":::");
+                if(tmp_array.length>=2){
+                    prep.setString(1, tmp_array[0]);
+                    prep.setString(2, tmp_array[1]);
+                    prep.addBatch();
+                    zaehler=zaehler+1;
+                    // if(zaehler%10000==0) System.out.println(zaehler);
+                    if(zaehler%1000000==0){
+                        conn.setAutoCommit(false);
+                        prep.executeBatch();
+                        conn.setAutoCommit(false);
+                        //System.out.println("done" + zaehler);
+                    }
+
+                }
+            }
+        } catch( FileNotFoundException ex ) {
+        } catch( Exception ex ) {
+            System.out.println( ex );
+        } finally {
+            if( in != null )
+                try {
+                    in.close();
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+        }
+
+        conn.setAutoCommit(false);
+        prep.executeBatch();
+        conn.setAutoCommit(true);
+        System.out.println("Number of Yago: "+zaehler);
+        System.out.println("Done");
+
+    }
+
+}
\ No newline at end of file

Added: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/ElementList_new.java
===================================================================
--- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/ElementList_new.java	(rev 0)
+++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/ElementList_new.java	2012-03-07 14:50:29 UTC (rev 3605)
@@ -0,0 +1,49 @@
+package org.dllearner.algorithm.tbsl.exploration.Sparql;
+
+import java.util.HashMap;
+
+/**
+ *
+ * @author swalter
+ *
+ */
+public class ElementList_new {
+
+    /**
+     * Name of the variable, e.g. ?y0
+     */
+    private String variablename;
+
+    /**
+     * URI of the Resource or Class, which was used for getting the depending elements with the uri
+     */
+    private String resourceURI;
+    /**
+     * HashMap with name-uri pairs.
+     */
+    private HashMap<String,String> hm = new HashMap<String,String>();
+    public String getVariablename() {
+        return variablename;
+    }
+    public void setVariablename(String variablename) {
+        this.variablename = variablename;
+    }
+    public HashMap<String,String> getHm() {
+        return hm;
+    }
+    public void setHm(HashMap<String,String> hm) {
+        this.hm = hm;
+    }
+    public String getResourceURI() {
+        return resourceURI;
+    }
+    public void setResourceURI(String resourceURI) {
+        this.resourceURI = resourceURI;
+    }
+
+    public ElementList_new(String variable, String resource, HashMap<String,String> hm){
+        this.setHm(hm);
+        this.setResourceURI(resource);
+        this.setVariablename(variable);
+    }
+}
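All six loader methods above share one JDBC bulk-insert idiom: stream a "name:::uri" text file, buffer rows with addBatch(), and flush every million rows with auto-commit disabled. Isolated and slightly cleaned up as a sketch (the table and file names here are hypothetical; note the original toggles setAutoCommit(false) twice where this sketch restores it to true after each flush):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;

    public class BatchInsertSketch {
        public static void main(String[] args) throws Exception {
            // requires the sqlite-jdbc driver on the classpath, as SQLiteIndex does
            Connection conn = DriverManager.getConnection("jdbc:sqlite::memory:");
            conn.createStatement().executeUpdate("create table pairs (name, uri);");
            PreparedStatement prep = conn.prepareStatement("insert into pairs values (?, ?);");

            BufferedReader in = new BufferedReader(new FileReader("pairs.txt"));
            String line;
            int count = 0;
            while ((line = in.readLine()) != null) {
                String[] parts = line.split(":::");
                if (parts.length >= 2) {
                    prep.setString(1, parts[0]);
                    prep.setString(2, parts[1]);
                    prep.addBatch();
                    count++;
                    // flush periodically so the pending batch does not grow unbounded
                    if (count % 1000000 == 0) {
                        conn.setAutoCommit(false);
                        prep.executeBatch();
                        conn.setAutoCommit(true);
                    }
                }
            }
            in.close();

            // final flush for the remainder
            conn.setAutoCommit(false);
            prep.executeBatch();
            conn.setAutoCommit(true);
        }
    }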
Prefix="http://greententacle.techfak.uni-bielefeld.de:5171/sparql"; - - public HashMap<String,String> getPropertys(String element, String side, int timeToTimeoutOnServer) throws IOException{ - - return sendServerPropertyRequest(element,side, timeToTimeoutOnServer); - - - - } - - /** - * Get an uri and saves the properties of this resource - * @param vergleich - * @return - * @throws IOException - */ - private HashMap<String,String> sendServerPropertyRequest(String vergleich, String side, int timeToTimeoutOnServer) throws IOException{ - - //System.out.println("Resource die gesucht wird: "+ vergleich); - //System.out.println("Seite die gesucht wird: "+side); - /* - * - * For the second Iteration, I can just add the sparql property here. - */ - - /* - * - * SELECT DISTINCT ?p WHERE {<http://dbpedia.org/resource/Berlin> ?y ?p.} für Berlin links der Property - * PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {<http://dbpedia.org/resource/Berlin> ?p ?y. ?p rdfs:label ?s.} - * - * SELECT DISTINCT ?p WHERE {?y ?p <http://dbpedia.org/resource/Berlin>.} für Berlin rechts der Property - * PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {?y ?p <http://dbpedia.org/resource/Berlin>. ?p rdfs:label ?s.} - * http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=PREFIX+rdfs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E+SELECT+DISTINCT+%3Fs+%3Fp+WHERE+{%3Fy+%3Fp+%3Chttp%3A%2F%2Fdbpedia.org%2Fresource%2FBerlin%3E.+%3Fp+rdfs%3Alabel+%3Fs.}&format=text%2Fhtml&debug=on&timeout= - */ - - String vergleichorig = vergleich; - - /* - * change to dbpedia http://dbpedia.org/sparql - */ - //String tmp_left="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {?y ?p <"+vergleichorig+">. ?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; - String tmp_left=Prefix+"?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {?y ?p <"+vergleichorig+">. ?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; - - //System.out.println("property right!!! : " +tmp_right); - String tmp_right=Prefix+"?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {<"+vergleichorig+"> ?p ?y. ?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; - - String tmp_both=Prefix+"?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {{?y ?p <"+vergleichorig+">. ?p rdfs:label ?s.} UNION {<"+vergleichorig+"> ?p ?y. ?p rdfs:label ?s.}}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; - String verarbeitungsurl=null; - /*Original*/ - if(side.contains("RIGHT")) verarbeitungsurl=tmp_right; - if(side.contains("LEFT")) verarbeitungsurl=tmp_left; - - /*if(side.contains("LEFT")) verarbeitungsurl=tmp_both; - if(side.contains("RIGHT")) verarbeitungsurl=tmp_both;*/ - - //System.out.println(verarbeitungsurl); - //just in case..... 
- if(!side.contains("LEFT") && !side.contains("RIGHT")) verarbeitungsurl=tmp_left; - - String result=""; - HttpURLConnection connection = null; - BufferedReader rd = null; - StringBuilder sb = null; - String line = null; - - URL serverAddress = null; - - try { - serverAddress = new URL(verarbeitungsurl); - //set up out communications stuff - connection = null; - - //Set up the initial connection - connection = (HttpURLConnection)serverAddress.openConnection(); - connection.setRequestMethod("GET"); - connection.setDoOutput(true); - connection.setReadTimeout(timeToTimeoutOnServer); - - connection.connect(); - rd = new BufferedReader(new InputStreamReader(connection.getInputStream())); - sb = new StringBuilder(); - - while ((line = rd.readLine()) != null) - { - sb.append(line + '\n'); - } - - //System.out.println(sb.toString()); - result=sb.toString(); - - } catch (MalformedURLException e) { - System.out.println("Must enter a valid URL"); - } catch (IOException e) { - System.out.println("Can not connect or timeout"); - } - finally - { - //close the connection, set all objects to null - connection.disconnect(); - rd = null; - sb = null; - Object wr = null; - connection = null; - } - - HashMap<String,String> hm = new HashMap(); - result=result.replace("<th>s</th>",""); - result=result.replace("<th>p</th>",""); - result=result.replace("<table class=\"sparql\" border=\"1\">",""); - result=result.replace("<tr>",""); - result=result.replace("</tr>",""); - result=result.replace("\n", ""); - result=result.replace(" ", ""); - result=result.replaceFirst("<td>", ""); - - - String[] tmp_array=result.split("</td><td>"); - - for(int i =1; i<=tmp_array.length-2;i=i+2) { - hm.put(tmp_array[i-1].toLowerCase(), tmp_array[i]); - //System.out.println(tmp_array[i-1].toLowerCase() + " " +tmp_array[i]); - } - - // System.out.println("created Properties: "+hm); - return hm; - } - - - private static ArrayList<String> do_parsing(String datei) - { - ArrayList<String> indexObject = null; - - File file = new File(datei); - try - { - MySaxParser parser = new MySaxParser(file); - parser.parse(); - indexObject = parser.getIndexObject(); - } - catch (Exception ex) - { - System.out.println("Another exciting error occured: " + ex.getLocalizedMessage()); - } - - return indexObject; - } - - - - - private String createServerRequest(String query){ - String anfrage=null; - anfrage=removeSpecialKeys(query); - anfrage=anfrage.replace("<","<"); - anfrage=anfrage.replace("%gt;",">"); - anfrage=anfrage.replace("&","&"); - //anfrage=anfrage.replaceAll("#>","%23%3E%0D%0A%"); - anfrage=anfrage.replace("#","%23"); - anfrage=anfrage.replace(" ","+"); - anfrage=anfrage.replace("/","%2F"); - anfrage=anfrage.replace(":","%3A"); - anfrage=anfrage.replace("?","%3F"); - anfrage=anfrage.replace("$","%24"); - //anfrage=anfrage.replaceAll("F>+","F%3E%0D%0A"); - anfrage=anfrage.replace(">","%3E"); - anfrage=anfrage.replace("<","%3C"); - anfrage=anfrage.replace("\"","%22"); - anfrage=anfrage.replace("\n","%0D%0A%09"); - anfrage=anfrage.replace("%%0D%0A%09","%09"); - anfrage=anfrage.replace("=","%3D"); - anfrage=anfrage.replace("@","%40"); - anfrage=anfrage.replace("&","%26"); - anfrage=anfrage.replace("(","%28"); - anfrage=anfrage.replace(")","%29"); - anfrage=anfrage.replace("%3E%0D%0A%25","%3E"); - //anfrage=anfrage.replaceAll("\n",".%0D%0A%09"); - return anfrage; - } - - private String removeSpecialKeys(String query){ - query=query.replace("\\",""); - //query=query.replaceAll("\a",""); - query=query.replace("\b",""); - query=query.replace("\f",""); 
- query=query.replace("\r",""); - query=query.replace("\t",""); - // query=query.replaceAll("\v",""); - return query; - } - -} Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java 2012-03-05 12:12:37 UTC (rev 3604) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java 2012-03-07 14:50:29 UTC (rev 3605) @@ -6,6 +6,7 @@ private String variable; private String uri; private float rank; +private String name; /** * RESOURCE,PROPERTY,UNSPEC @@ -31,11 +32,12 @@ this.rank = rank; } -public Hypothesis(String variable, String uri, String type, float rank){ - setRank(rank); - setVariable(variable); - setUri(uri); - setType(type); +public Hypothesis(String variable, String name, String uri, String type, float rank){ + this.setRank(rank); + this.setVariable(variable); + this.setUri(uri); + this.setType(type); + this.setName(name); } public String getType() { @@ -48,10 +50,17 @@ public void printAll(){ System.out.println("%%%%%%%%%%%"); System.out.println("Variable: "+variable); + System.out.println("Name: "+name); System.out.println("Uri: " + uri); System.out.println("Type: " + type); System.out.println("Rank: "+rank); System.out.println("%%%%%%%%%%%"); } +public String getName() { + return name; +} +public void setName(String name) { + this.name = name; +} } Deleted: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java 2012-03-05 12:12:37 UTC (rev 3604) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java 2012-03-07 14:50:29 UTC (rev 3605) @@ -1,54 +0,0 @@ -package org.dllearner.algorithm.tbsl.exploration.Sparql; - - -import java.lang.Math; -import java.math.BigDecimal; - -public class Levenshtein { - - - public static double nld(String orig, String eing){ - double result = computeLevenshteinDistance(orig,eing); - //System.out - double length=Math.max(orig.length(),eing.length()); - - //if distance between both is zero, then the NLD must be one - //but because they are equal, return a very high value, so that that query will be taken. - if(result==0.0 ){ - return 10.0; - } - else{ - - double result_nld =result/length; - return result_nld; - } - - } - - - //http://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance#Java - private static int minimum(int a, int b, int c) { - return Math.min(Math.min(a, b), c); - } - - public static int computeLevenshteinDistance(CharSequence str1, - CharSequence str2) { - int[][] distance = new int[str1.length() + 1][str2.length() + 1]; - - for (int i = 0; i <= str1.length(); i++) - distance[i][0] = i; - for (int j = 0; j <= str2.length(); j++) - distance[0][j] = j; - - for (int i = 1; i <= str1.length(); i++) - for (int j = 1; j <= str2.length(); j++) - distance[i][j] = minimum( - distance[i - 1][j] + 1, - distance[i][j - 1] + 1, - distance[i - 1][j - 1] - + ((str1.charAt(i - 1) == str2.charAt(j - 1)) ? 
0 - : 1)); - - return distance[str1.length()][str2.length()]; - } -} Deleted: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlFilter.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlFilter.java 2012-03-05 12:12:37 UTC (rev 3604) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlFilter.java 2012-03-07 14:50:29 UTC (rev 3605) @@ -1,54 +0,0 @@ -package org.dllearner.algorithm.tbsl.exploration.Sparql; - -import java.util.HashMap; - -public class SparqlFilter { - public void create_Sparql_who(String string,HashMap<String, String> hm){ - // string=string.replaceAll("?", ""); - String[] array= string.split(" "); - //schauen ob erstes Wort ein who ist! - if(array[0].contains("who")){ - int position=0; - for(int i=0;i<array.length;i++){ - if (array[i].contains("of")){ - position=i; - break; - } - } - String vor_of=array[position-1]; - String nach_of=""; - //wenn nur ein element hinter of kommt - if(array.length-position-1==1){ - nach_of=array[position+1]; - } - else{ - for(int i=position+1; i<array.length;i++){ - //nach_of=nach_of+array[i]+" "; - nach_of=(nach_of.concat(array[i])).concat(" "); - } - - //letztes leerzeichen loeschen - nach_of = nach_of.substring(0, nach_of.length()-1); - } - String uri_vor_of=" "; - String uri_nach_of=" "; - - uri_vor_of=hm.get(vor_of); - uri_nach_of=hm.get(nach_of); - if(uri_vor_of!=null && uri_nach_of!=null){ - uri_nach_of=uri_nach_of.replace("Category:", ""); - uri_nach_of=uri_nach_of.replace("category:", ""); - - - String anfrage=null; - anfrage="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>select ?x where { <"+uri_nach_of+"> <"+uri_vor_of+"> ?x.}"; - - } - else{ - //System.out.println("Nothing to do"); - } - - } - - } -} Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2012-03-05 12:12:37 UTC (rev 3604) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2012-03-07 14:50:29 UTC (rev 3605) @@ -27,7 +27,9 @@ import net.didion.jwnl.JWNLException; import net.didion.jwnl.data.POS; -import org.dllearner.algorithm.tbsl.exploration.sax.ParseXmlHtml; +import org.dllearner.algorithm.tbsl.exploration.Index.SQLiteIndex; +import org.dllearner.algorithm.tbsl.exploration.Utils.GetRessourcePropertys; +import org.dllearner.algorithm.tbsl.exploration.Utils.Levenshtein; import org.dllearner.algorithm.tbsl.nlp.StanfordLemmatizer; import org.dllearner.algorithm.tbsl.nlp.WordNet; import org.dllearner.algorithm.tbsl.sparql.BasicQueryTemplate; @@ -62,7 +64,7 @@ static WordNet wordnet; BasicTemplator btemplator; Templator templator; - private static mySQLDictionary myindex; + private static SQLiteIndex myindex; boolean only_best_levensthein_query; static StanfordLemmatizer lemmatiser; //one Minute @@ -86,7 +88,7 @@ //templator = new Templator(); System.out.println("Loading SPARQL Templator Done\n"); System.out.println("Start Indexing"); - myindex = new mySQLDictionary(); + myindex = new SQLiteIndex(); System.out.println("Done:Indexing"); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java 
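Since the Levenshtein class is moved rather than changed (it now lives under exploration.Utils, per the Added Paths above), a quick worked example of what its normalized distance returns may be useful; the example strings are invented:

    import org.dllearner.algorithm.tbsl.exploration.Utils.Levenshtein;

    public class NldExampleSketch {
        public static void main(String[] args) {
            // one substitution out of max(6, 6) characters: 1/6 ≈ 0.167 (lower means closer)
            System.out.println(Levenshtein.nld("berlin", "berlon"));

            // equal strings are special-cased: distance 0 maps to the sentinel 10.0,
            // "so that that query will be taken", per the comment in the class
            System.out.println(Levenshtein.nld("berlin", "berlin"));
        }
    }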
Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java
===================================================================
--- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java	2012-03-05 12:12:37 UTC (rev 3604)
+++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java	2012-03-07 14:50:29 UTC (rev 3605)
@@ -4,6 +4,7 @@
 
 public class Template {
 
+    private ArrayList<ElementList_new> list_of_element_uri_pair = new ArrayList<ElementList_new>();
     private ArrayList<ArrayList<String>> condition = new ArrayList<ArrayList<String>>();
     private ArrayList<ArrayList<Hypothesis>> hypothesen = new ArrayList<ArrayList<Hypothesis>>();
     private String selectTerm;
@@ -11,6 +12,9 @@
     private String filter;
     private String OrderBy;
     private String limit;
+    private String question;
+    private float overallTime;
+    private float time_Templator;
 
     public String getHaving() {
         return having;
@@ -45,13 +49,14 @@
     }
 
-    public Template(ArrayList<ArrayList<String>>condition_new, String having_new, String filter_new, String SelectTerm_new, String OrderBy_new, String limit_new){
-        setCondition(condition_new);
-        setHaving(having_new);
-        setFilter(filter_new);
-        setOrderBy(OrderBy_new);
-        setLimit(limit_new);
-        setSelectTerm(SelectTerm_new);
+    public Template(ArrayList<ArrayList<String>>condition_new, String having_new, String filter_new, String SelectTerm_new, String OrderBy_new, String limit_new, String question_new){
+        this.setCondition(condition_new);
+        this.setHaving(having_new);
+        this.setFilter(filter_new);
+        this.setOrderBy(OrderBy_new);
+        this.setLimit(limit_new);
+        this.setSelectTerm(SelectTerm_new);
+        this.setQuestion(question_new);
     }
     public ArrayList<ArrayList<String>> getCondition() {
         return condition;
@@ -73,6 +78,7 @@
 
     public void printAll(){
         System.out.println("###### Template ######");
+        System.out.println("question: "+ question);
         System.out.println("condition: "+condition);
         //System.out.println("hypotesen: "+hypothesen);
         int anzahl = 1;
@@ -91,6 +97,35 @@
         System.out.println("limit: "+limit);
         System.out.println("###### Template printed ######\n");
     }
+    public String getQuestion() {
+        return question;
+    }
+    public void setQuestion(String question) {
+        this.question = question;
+    }
+    public float getOverallTime() {
+        return overallTime;
+    }
+    public void setOverallTime(float overallTime) {
+        this.overallTime = overallTime;
+    }
+    public float getTime_Templator() {
+        return time_Templator;
+    }
+    public void setTime_Templator(float time_Templator) {
+        this.time_Templator = time_Templator;
+    }
+    public ArrayList<ElementList_new> getList_of_element_uri_pair() {
+        return list_of_element_uri_pair;
+    }
+    public void setList_of_element_uri_pair(ArrayList<ElementList_new> list_of_element_uri_pair) {
+        this.list_of_element_uri_pair = list_of_element_uri_pair;
+    }
+
+    public void addToList_of_element_uri_pair(ElementList_new newElement) {
+        this.list_of_element_uri_pair.add(newElement);
+    }
 }

Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java
===================================================================
--- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java	2012-03-05 12:12:37 UTC (rev 3604)
+++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java	2012-03-07 14:50:29 UTC (rev 3605)
@@ -1,10 +1,22 @@
 package org.dllearner.algorithm.tbsl.exploration.Sparql;
 
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
 import java.net.MalformedURLException;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Set;
 
+import org.dllearner.algorithm.tbsl.exploration.Index.SQLiteIndex;
+import org.dllearner.algorithm.tbsl.exploration.Index.Index_utils;
 import org.dllearner.algorithm.tbsl.sparql.BasicQueryTemplate;
 import org.dllearner.algorithm.tbsl.sparql.Path;
 import org.dllearner.algorithm.tbsl.sparql.SPARQL_Filter;
@@ -16,20 +28,54 @@
 public class TemplateBuilder {
 
 static BasicTemplator btemplator;
-private static mySQLDictionary myindex;
+private static SQLiteIndex myindex;
 
 public TemplateBuilder() throws MalformedURLException, ClassNotFoundException, SQLException{
 
     TemplateBuilder.btemplator = new BasicTemplator();
     //btemplator.UNTAGGED_INPUT = false;
-    TemplateBuilder.myindex = new mySQLDictionary();
+    TemplateBuilder.myindex = new SQLiteIndex();
 }
 
-public ArrayList<Template> createTemplates(String question){
+public ArrayList<Template> createTemplates(String question) throws IOException{
+
+    long start = System.currentTimeMillis();
+
     ArrayList<Template> resultArrayList = new ArrayList<Template>();
-    Set<BasicQueryTemplate> querytemps = btemplator.buildBasicQueries(question);
+    Set<BasicQueryTemplate> querytemps =null;
+    querytemps = btemplator.buildBasicQueries(question);
+
+    /*
+     * check if templates were built; if not, save the question and delete it next time from the xml file.
+     */
+    if(querytemps.contains("could not be parsed") || querytemps.isEmpty()){
+        String dateiname="/home/swalter/Dokumente/Auswertung/NotParsed.txt";
+        String result_string ="";
+        //Open the file for reading
+        try {
+            BufferedReader br = new BufferedReader(new FileReader(dateiname));
+            String thisLine;
+            while ((thisLine = br.readLine()) != null) { // while loop begins here
+                result_string+=thisLine+"\n";
+            } // end while
+        } // end try
+        catch (IOException e) {
+            System.err.println("Error: " + e);
+        }
+
+        File file = new File(dateiname);
+        BufferedWriter bw = new BufferedWriter(new FileWriter(file));
+
+        bw.write(result_string+"\n"+question);
+        bw.flush();
+        bw.close();
+
+
+    }
+
+    long stop_template = System.currentTimeMillis();
 
     for (BasicQueryTemplate bqt : querytemps) {
         ArrayList<ArrayList<String>> condition = new ArrayList<ArrayList<String>>();
         //ArrayList<ArrayList<Hypothesis>> hypotesen = new ArrayList<ArrayList<Hypothesis>>();
@@ -114,22 +160,24 @@
      * SLOT_title: PROPERTY {title,name,label} mitfuehren
      */
     //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-    Template template = new Template(condition, having, filter, selectTerm,OrderBy, limit);
+    Template template = new Template(condition, having, filter, selectTerm,OrderBy, limit,question);
 
     //TODO: Iterate over slots
     ArrayList<Hypothesis> list_of_hypothesis = new ArrayList<Hypothesis>();
     for(Slot slot : bqt.getSlots()){
+        //System.out.println("Slot: "+slot.toString());
         if(slot.toString().contains("UNSPEC")){
            String tmp= slot.toString().replace(" UNSPEC {", "");
            tmp=tmp.replace("}","");
            String[] tmp_array = tmp.split(":");
-           Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "UNSPEC", 0);
+           Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0],tmp_array[1], tmp_array[1], "UNSPEC", 0);
+           //tmp_hypothesis.printAll();
            list_of_hypothesis.add(tmp_hypothesis);
         }
         if(slot.toString().contains("PROPERTY")){
            String tmp= slot.toString().replace(" PROPERTY {", "");
            tmp=tmp.replace("}","");
            String[] tmp_array = tmp.split(":");
-           Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "PROPERTY", 0);
+           Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1],tmp_array[1], "PROPERTY", 0);
            list_of_hypothesis.add(tmp_hypothesis);
         }
 
@@ -137,7 +185,7 @@
            String tmp= slot.toString().replace(" RESOURCE {", "");
            tmp=tmp.replace("}","");
            String[] tmp_array = tmp.split(":");
-           Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "RESOURCE", 0);
+           Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0],tmp_array[1], tmp_array[1], "RESOURCE", 0);
            list_of_hypothesis.add(tmp_hypothesis);
         }
     }
@@ -145,10 +193,13 @@
 
    //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 
    for(Hypothesis x : list_of_hypothesis){
-       if(x.getType().contains("RESOURCE")){
+       if(x.getType().contains("RESOURCE")|| x.getType().contains("UNSPEC") ){
            ArrayList<String> result= new ArrayList<String>();
            try {
-               result = utils_new.searchIndex(x.getUri(), 3, myindex);
+               /* here I have to check the hypothesis if I have an isA in my Condition,
+                * if so, only look up Yago and OntologyClass.
+                */
+               result = Index_utils.searchIndex(x.getUri(), 3, myindex);
            } catch (SQLException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
 
            //String variable, String uri, String type, float rank
            for(Hypothesis h : list_of_hypothesis){
                if (h.getUri().equals(x.getUri())){
-                   Hypothesis new_h = new Hypothesis(h.getVariable(), s, h.getType(), 1);
-                   new_list.add(new_h);
+                   if(s!=null){
+                       Hypothesis new_h = new Hypothesis(h.getVariable(),h.getName(), s, h.getType(), 1);
+                       new_list.add(new_h);
+                   }
+                   else{
+                       Hypothesis new_h = new Hypothesis(h.getVariable(),h.getName(), h.getUri(), h.getType(), 1);
+                       new_list.add(new_h);
+                   }
+
                }
                else{
-                   Hypothesis new_h = new Hypothesis(h.getVariable(), h.getUri(), h.getType(), h.getRank());
+                   Hypothesis new_h = new Hypothesis(h.getVariable(),h.getName(), h.getUri(), h.getType(), h.getRank());
                    new_list.add(new_h);
                }
            }
@@ -174,20 +232,37 @@
 
-   //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+   //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+   /*
+    * cache lookups for properties, so we don't have to access the sql database every time
+    */
+   HashMap<String,String> hm = new HashMap<String, String>();
+
    for(ArrayList<Hypothesis> x : final_list_set_hypothesis){
+
        for(Hypothesis h : x){
-           if(h.getType().contains("PROPERTY") || h.getType().contains("UNSPEC")){
+
+           //only if you have a Property or an Unspec which still has no http://dbpedia etc.
+           if(h.getType().contains("PROPERTY") || (h.getType().contains("UNSPEC")&& !h.getUri().contains("http"))){
                ArrayList<String> result= new ArrayList<String>();
                try {
-                   result = utils_new.searchIndex(h.getUri(), 1, myindex);
+                   if(hm.containsKey(h.getUri().toLowerCase())){
+                       result.add(hm.get(h.getUri().toLowerCase()));
+                   }
+                   else{
+                       result = Index_utils.searchIndex(h.getUri(), 1, myindex);
+                       if(!result.isEmpty())hm.put(h.getUri().toLowerCase(),result.get(0));
+                   }
                    if(!result.isEmpty()){
                        h.setUri(result.get(0));
                        h.setRank(1);
                    }
                    else{
-                       String tmp = "http://dbpedia.org/ontology/"+h.getUri().toLowerCase();
+                       String tmp = "http://dbpedia.org/ontology/"+h.getUri().toLowerCase().replace(" ", "_");
+
                        h.setUri(tmp);
                        h.setRank(0);
                    }
@@ -200,8 +275,11 @@
    }
 
    template.setHypothesen(final_list_set_hypothesis);
+
+
+   //TODO:
Take Template like it is and change Condition - Template template_reverse_conditions = new Template(template.getCondition(), template.getHaving(), template.getFilter(), template.getSelectTerm(), template.getOrderBy(), template.getLimit()); + Template template_reverse_conditions = new Template(template.getCondition(), template.getHaving(), template.getFilter(), template.getSelectTerm(), template.getOrderBy(), template.getLimit(), template.getQuestion()); //= template; ArrayList<ArrayList<String>> condition_template_reverse_conditions = template_reverse_conditions.getCondition(); @@ -214,7 +292,15 @@ condition_reverse_new.add(new_list); } + long stop = System.currentTimeMillis(); + template_reverse_conditions.setOverallTime(stop-start); + template.setOverallTime(stop-start); + + template_reverse_conditions.setTime_Templator(stop_template-start); + template.setTime_Templator(stop_template-start); + template_reverse_conditions.setCondition(condition_reverse_new); + template_reverse_conditions.setHypothesen(template.getHypothesen()); resultArrayList.add(template); resultArrayList.add(template_reverse_conditions); Deleted: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java 2012-03-05 12:12:37 UTC (rev 3604) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java 2012-03-07 14:50:29 UTC (rev 3605) @@ -1,594 +0,0 @@ -package org.dllearner.algorithm.tbsl.exploration.Sparql; - -import java.io.BufferedReader; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStreamReader; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.ArrayList; - -import org.dllearner.algorithm.tbsl.nlp.StanfordLemmatizer; - -public class mySQLDictionary { - private Connection conn; - StanfordLemmatizer lemma; - - public mySQLDictionary() throws ClassNotFoundException, SQLException { - // TODO Auto-generated constructor stub - Class.forName( "org.sqlite.JDBC" ); - conn = DriverManager.getConnection("jdbc:sqlite::memory:"); - createIndexPropertys(); - createIndexResource(); - createWordnetHelp(); - createIndexOntology(); - createIndexOntologyClass(); - createIndexofYago(); - lemma = new StanfordLemmatizer(); - - //optional!! - //createIndexWikipedia(); - - } - - /* - * Next, we want to select the persons living in a city that contains the pattern "tav" from the "Persons" table. 
- -We use the following SELECT statement: -SELECT * FROM Persons -WHERE City LIKE '%tav%' - */ - - public String getResourceURI(String string) throws SQLException{ - /* while(rs.next()) - {*/ - Statement stat = conn.createStatement(); - ResultSet rs; - try { - rs = stat.executeQuery("select uri from resource where name='"+string.toLowerCase()+"';"); - /*while(rs.next()){ - System.out.println("Next: "+rs.getString("uri")); - }*/ - return rs.getString("uri"); - } catch (Exception e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - return null; - } - - } - - public ArrayList<String> getResourceURILike(String string) throws SQLException{ - /* while(rs.next()) - {*/ - Statement stat = conn.createStatement(); - ResultSet rs; - ArrayList<String> result= new ArrayList<String>(); - try { - rs = stat.executeQuery("select uri from resource where name like'"+string.toLowerCase()+"%';"); - while(rs.next()){ - System.out.println("Next: "+rs.getString("uri")); - result.add(rs.getString("uri")); - } - return result; - } catch (Exception e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - return null; - } - - } - - public ArrayList<String> getYagoURILike(String string) throws SQLException{ - /* while(rs.next()) - {*/ - Statement stat = conn.createStatement(); - ResultSet rs; - ArrayList<String> result= new ArrayList<String>(); - try { - rs = stat.executeQuery("select uri from yago where name like'"+string.toLowerCase()+"%';"); - while(rs.next()){ - System.out.println("Next: "+rs.getString("uri")); - result.add(rs.getString("uri")); - } - return result; - } catch (Exception e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - return null; - } - - } - - - public String getYagoURI(String string) throws SQLException{ - /* while(rs.next()) - {*/ - Statement stat = conn.createStatement(); - ResultSet rs; - try { - rs = stat.executeQuery("select uri from yago where name='"+string.toLowerCase()+"';"); - return rs.getString("uri"); - } catch (Exception e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - return null; - } - - } - - - public String getPropertyURI(String string) throws SQLException{ - Statement stat = conn.createStatement(); - ResultSet rs; - try { - rs = stat.executeQuery("select uri from property where name='"+string.toLowerCase()+"';"); - return rs.getString("uri"); - } catch (Exception e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - return null; - } - - - } - - public String getontologyURI(String string) throws SQLException{ - Statement stat = conn.createStatement(); - ResultSet rs; - try { - rs = stat.executeQuery("select uri from ontology where name='"+string.toLowerCase()+"';"); - return rs.getString("uri"); - } catch (Exception e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - return null; - } - - - } - - public String getontologyClassURI(String string) throws SQLException{ - Statement stat = conn.createStatement(); - ResultSet rs; - try { - rs = stat.executeQuery("select uri from ontologyClass where name='"+string.toLowerCase()+"'... [truncated message content] |
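The index rework in the message above replaces the deleted mySQLDictionary with the new SQLiteIndex; both answer surface-form lookups such as "select uri from resource where name='...'" against an in-memory SQLite database. Below is a minimal, illustrative sketch of that lookup pattern, not the actual SQLiteIndex code: the class name and the CREATE TABLE statement are assumptions, and only the query shape and the jdbc:sqlite::memory: connection come from the diff. A PreparedStatement replaces the string concatenation used in the deleted class, which sidesteps quoting problems with names containing apostrophes.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class InMemorySqliteLookup {

        private final Connection conn;

        public InMemorySqliteLookup() throws ClassNotFoundException, SQLException {
            Class.forName("org.sqlite.JDBC");
            // same in-memory database the deleted mySQLDictionary opened
            conn = DriverManager.getConnection("jdbc:sqlite::memory:");
            Statement stat = conn.createStatement();
            // assumed schema; the real index is populated from dump files
            stat.executeUpdate("CREATE TABLE IF NOT EXISTS resource (name TEXT, uri TEXT)");
            stat.close();
        }

        /** Returns the URI indexed for the given surface form, or null if none exists. */
        public String getResourceURI(String name) throws SQLException {
            // parameterized query instead of concatenating the search term into the SQL string
            PreparedStatement ps = conn.prepareStatement("SELECT uri FROM resource WHERE name = ?");
            try {
                ps.setString(1, name.toLowerCase());
                ResultSet rs = ps.executeQuery();
                return rs.next() ? rs.getString("uri") : null;
            } finally {
                ps.close();
            }
        }
    }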
From: <lor...@us...> - 2012-03-05 12:12:47
Revision: 3604 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3604&view=rev Author: lorenz_b Date: 2012-03-05 12:12:37 +0000 (Mon, 05 Mar 2012) Log Message: ----------- Updated JENA libs. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java trunk/pom.xml Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/components-core/pom.xml 2012-03-05 12:12:37 UTC (rev 3604) @@ -93,12 +93,15 @@ <dependency> <groupId>org.aksw.commons</groupId> <artifactId>sparql</artifactId> - <version>0.2-SNAPSHOT</version> <exclusions> <exclusion> <artifactId>slf4j-api</artifactId> <groupId>org.slf4j</groupId> </exclusion> + <exclusion> + <artifactId>arq</artifactId> + <groupId>com.hp.hpl.jena</groupId> + </exclusion> </exclusions> </dependency> @@ -125,18 +128,10 @@ <artifactId>log4j</artifactId> </dependency> - <!-- Available via central, we use the latest with minor mods to DL Learner - source (IE Dig related code) --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>jena</artifactId> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - </exclusions> - </dependency> + <groupId>org.apache.jena</groupId> + <artifactId>jena-arq</artifactId> + </dependency> <!--JSON is in Central --> <dependency> @@ -180,17 +175,10 @@ </dependency> - <!--JENA ARQ is in central - we use the latest --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>arq</artifactId> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - </exclusions> - </dependency> + <groupId>org.apache.jena</groupId> + <artifactId>jena-core</artifactId> + </dependency> <dependency> <groupId>junit</groupId> @@ -230,7 +218,7 @@ <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> - <version>20041127.091804</version> + <version>1.5</version> </dependency> <dependency> <groupId>org.springframework</groupId> @@ -245,5 +233,10 @@ <artifactId>jwnl</artifactId> <version>1.4.1.RC2</version> </dependency> + <dependency> + <groupId>org.apache.lucene</groupId> + <artifactId>lucene-core</artifactId> + <version>3.5.0</version> + </dependency> </dependencies> </project> Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java 2012-03-05 12:12:37 UTC (rev 3604) @@ -19,24 +19,32 @@ package org.dllearner.kb.sparql; -import com.hp.hpl.jena.query.*; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.hp.hpl.jena.query.ARQ; +import com.hp.hpl.jena.query.Dataset; +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecution; +import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.ResultSet; +import 
com.hp.hpl.jena.query.ResultSetFactory; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.sparql.engine.http.HttpParams; import com.hp.hpl.jena.sparql.engine.http.Params; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; +import com.hp.hpl.jena.sparql.graph.GraphFactory; import com.hp.hpl.jena.sparql.resultset.XMLInput; import com.hp.hpl.jena.sparql.util.Context; -import com.hp.hpl.jena.sparql.util.graph.GraphFactory; import com.hp.hpl.jena.util.FileManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.TimeUnit; - /** * Created by Claus Stadler * Date: Oct 25, 2010 @@ -364,6 +372,11 @@ public Dataset getDataset() { return null; } + + @Override + public Query getQuery() { + return QueryFactory.create(queryString); + } } /* Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java 2012-03-05 12:12:37 UTC (rev 3604) @@ -37,12 +37,11 @@ import java.util.List; import java.util.Map; -import org.openjena.atlas.lib.Base64; +import org.apache.commons.codec.binary.Base64; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.hp.hpl.jena.query.ARQ; -import com.hp.hpl.jena.sdb.util.Pair; import com.hp.hpl.jena.shared.JenaException; import com.hp.hpl.jena.sparql.ARQInternalErrorException; import com.hp.hpl.jena.sparql.engine.http.HttpParams; @@ -293,7 +292,7 @@ // Build string, get as UTF-8, bytes, translate to base 64. StringBuffer x = new StringBuffer() ; byte b[] = x.append(user).append(":").append(password).toString().getBytes("UTF-8") ; - String y = Base64.encodeBytes(b) ; + String y = Base64.encodeBase64String(b); httpConnection.setRequestProperty("Authorization", "Basic "+y) ; // Overwrite any password details we copied. // Still leaves the copy in the HTTP connection. But this only basic auth. 
Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/pom.xml 2012-03-05 12:12:37 UTC (rev 3604) @@ -162,9 +162,9 @@ <!--Available via central, we use the latest with minor mods to DL Learner source (IE Dig related code) --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>jena</artifactId> - <version>2.6.4</version> + <groupId>org.apache.jena</groupId> + <artifactId>jena-core</artifactId> + <version>2.7.0-incubating</version> </dependency> <!--SwingX is in central --> <dependency> @@ -221,11 +221,10 @@ </dependency> - <!--JENA ARQ is in central - we use the latest --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>arq</artifactId> - <version>2.8.8</version> + <groupId>org.apache.jena</groupId> + <artifactId>jena-arq</artifactId> + <version>2.9.0-incubating</version> </dependency> <!--Junits --> @@ -302,7 +301,7 @@ <dependency> <groupId>org.aksw.commons</groupId> <artifactId>sparql</artifactId> - <version>${aksw.version}</version> + <version>0.2-SNAPSHOT</version> </dependency> <dependency> <groupId>org.aksw.commons</groupId> @@ -542,6 +541,13 @@ <name>Semanticscience projects</name> <url>http://s1.semanticscience.org:8080/nexus/content/groups/public/</url> </repository> + <repository> + <id>apache-repo-releases</id> + <url>https://repository.apache.org/content/repositories/releases/</url> + <releases> + <enabled>true</enabled> + </releases> +</repository> </repositories> <pluginRepositories> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
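The HttpQuery change above moves basic-auth encoding from the openjena atlas Base64 to commons-codec. A self-contained sketch of the resulting header construction, assuming only commons-codec 1.5 (the version pinned in the pom above) on the classpath; the connection and credentials are placeholders:

    import java.io.UnsupportedEncodingException;
    import java.net.HttpURLConnection;

    import org.apache.commons.codec.binary.Base64;

    public class BasicAuthHeader {

        /** Builds "user:password", gets the UTF-8 bytes, translates to base 64, as in the patched HttpQuery. */
        static void applyBasicAuth(HttpURLConnection httpConnection, String user, String password)
                throws UnsupportedEncodingException {
            byte[] b = (user + ":" + password).getBytes("UTF-8");
            String y = Base64.encodeBase64String(b);
            httpConnection.setRequestProperty("Authorization", "Basic " + y);
        }
    }

The version bump matters here: encodeBase64String produced chunked (line-wrapped) output in commons-codec 1.4 and only became single-line in 1.5, and an Authorization header must not contain line breaks.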
From: <lor...@us...> - 2012-03-01 09:32:12
Revision: 3603 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3603&view=rev Author: lorenz_b Date: 2012-03-01 09:32:01 +0000 (Thu, 01 Mar 2012) Log Message: ----------- Added some optimization to use local debugging if all justifications for each entity are found, so no reasoner call would be needed to check if there are still some unsatisfiable entities. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-03-01 02:50:14 UTC (rev 3602) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-03-01 09:32:01 UTC (rev 3603) @@ -106,6 +106,7 @@ private Set<OWLTransitiveObjectPropertyAxiom> removedTransitiveAxioms; private Set<OWLObjectProperty> unsatObjectProperties; + private Set<OWLClass> unsatClasses; //whether to debug classes and properties in parallel private boolean computeParallel = false; @@ -196,7 +197,7 @@ logger.info("Computing root/derived unsatisfiable classes..."); long startTime = System.currentTimeMillis(); StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner, this); - Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses(); + unsatClasses = rootFinder.getRootUnsatisfiableClasses(); Set<OWLClass> derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -347,8 +348,9 @@ System.gc(); } } - save("log/" + fileName + "_coherent.owl"); + logger.info("Finished. \n Coherent ontology contains " + ontology.getLogicalAxiomCount() + " logical axioms." 
+ + "Removed axioms: " + diffOntology.getLogicalAxiomCount()); return ontology; } @@ -361,7 +363,7 @@ //compute the unsatisfiable classes logger.info("Computing unsatisfiable classes..."); long startTime = System.currentTimeMillis(); - Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Detected " + unsatClasses.size() + " unsatisfiable classes."); @@ -384,7 +386,7 @@ cnt += unsatObjectProperties.size(); } - while(cnt > 0){ + while(!runLocalDebugging() && cnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -399,8 +401,8 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if((!computeParallel && (cnt-unsatClasses.size()>= 5)) - || (computeParallel && (cnt-unsatClasses.size()+unsatObjectProperties.size())>=5)){ + if((!computeParallel && (cnt-unsatClasses.size()>= 10)) + || (computeParallel && (cnt-unsatClasses.size()+unsatObjectProperties.size())>=10)){ cnt = unsatClasses.size(); save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); if(computeParallel){ @@ -425,6 +427,8 @@ } entity2Explanations.clear(); entity2ModuleMap.clear(); + entitiesWithLessExplanations.clear(); + entity2ExpGen.clear(); save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); @@ -434,11 +438,11 @@ int unsatPropCnt = unsatObjectProperties.size(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); - logger.info("Recomputing explanations..."); + logger.info("Computing explanations..."); startTime = System.currentTimeMillis(); computeExplanations(unsatObjectProperties); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(unsatPropCnt > 0){ + while(!runLocalDebugging() && unsatPropCnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -447,7 +451,7 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if((unsatPropCnt - unsatObjectProperties.size()) >= 5){ + if((unsatPropCnt - unsatObjectProperties.size()) >= 10){ save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatPropCnt + "prop.owl"); unsatPropCnt = unsatObjectProperties.size(); } @@ -464,12 +468,52 @@ System.gc(); } } - save("log/" + fileName + "_coherent.owl"); + logger.info("Finished. \n Coherent ontology contains " + ontology.getLogicalAxiomCount() + " logical axioms." + + "Removed axioms: " + diffOntology.getLogicalAxiomCount()); return ontology; } + /* + * check here if all explanations are found, i.e. 
for each entity the number of justifications is lower than the limit which was set + */ + private boolean allExplanationsFound(){ + boolean allExplanationsFound = false; + if(computeParallel){ + allExplanationsFound = entitiesWithLessExplanations.size() == (unsatClasses.size() + unsatObjectProperties.size()); + } else { + allExplanationsFound = entitiesWithLessExplanations.size() == unsatClasses.size(); + } + return allExplanationsFound; + } + + private boolean runLocalDebugging(){ + if(allExplanationsFound()){ + //add all explanations into one set + Set<Set<OWLAxiom>> explanations = new HashSet<Set<OWLAxiom>>(); + for(Entry<OWLEntity, Set<Set<OWLAxiom>>> e: entity2Explanations.entrySet()){ + explanations.addAll(e.getValue()); + } + //get the frequency for each axiom + Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); + //get a sorted list of entries with the highest axiom count first + List<Entry<OWLAxiom, Integer>> candidates = sort(axiom2CountMap); + //remove axioms until no further explanation exists + while(!explanations.isEmpty()){ + removeAppropriateAxiomLocal(explanations, candidates); + } + if(computeParallel){ + unsatClasses.clear(); + unsatObjectProperties.clear(); + } else { + unsatClasses.clear(); + } + return true; + } + return false; + } + private OWLOntology computeCoherentOntology2(OWLOntology ontology) { //compute the unsatisfiable classes logger.info("Computing unsatisfiable classes..."); @@ -562,6 +606,40 @@ } } + private void removeAppropriateAxiomLocal(Set<Set<OWLAxiom>> explanations, List<Entry<OWLAxiom, Integer>> candidates){ + logger.info("Searching for appropriate axiom to remove..."); + logger.info("Candidates: " + candidates.size()); + if(candidates.size() >= 2){ + logger.info("First: " + candidates.get(0) + "(" + getConfidence(candidates.get(0).getKey()) + ")"); + logger.info("Second: " + candidates.get(1) + "(" + getConfidence(candidates.get(1).getKey()) + ")"); + } + + //we remove the most frequent axiom from the ontology which is not contained in the original DBpedia ontology + for(Iterator<Entry<OWLAxiom, Integer>> iter = candidates.iterator(); iter.hasNext();){ + OWLAxiom axiom = iter.next().getKey(); + if(!dbpediaOntology.containsAxiomIgnoreAnnotations(axiom)){ + iter.remove(); + logger.info("Removing axiom " + axiom + "."); + manager.removeAxiom(incoherentOntology, axiom); + //remove the axiom also from the loaded ontology + OWLAxiom originalAnnotatedAxiom = ontology.getAxiomsIgnoreAnnotations(axiom).iterator().next(); + ontology.getOWLOntologyManager().removeAxiom(ontology, originalAnnotatedAxiom); + //add the removed annotated axiom to the diff ontology + manager.addAxiom(diffOntology, originalAnnotatedAxiom); + //remove each explanation which contains the axiom + for (Iterator<Set<OWLAxiom>> iterator = explanations.iterator(); iterator.hasNext();) { + Set<OWLAxiom> explanation = iterator.next(); + if(explanation.contains(axiom)){ + iterator.remove(); + } + } + return; + } else { + iter.remove(); + } + } + } + private void save(String fileName){ logger.info("Writing to disk..."); long startTime = System.currentTimeMillis(); @@ -951,7 +1029,6 @@ OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); - System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } class HermiTReasonerFactory implements OWLReasonerFactory{ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
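Once all justifications per entity are known (the allExplanationsFound check above), r3603 can finish the repair without further reasoner calls: pool all justifications and greedily remove the axiom that hits the most of them until none is left intact. Below is a generic sketch of that loop, independent of the OWL API types; the protected set stands in for the containsAxiomIgnoreAnnotations test against the DBpedia reference ontology, and unlike the patch, which builds the frequency list once up front, this variant recounts per round for simplicity.

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.LinkedHashSet;
    import java.util.Map;
    import java.util.Set;

    public class GreedyHittingSet {

        /** Picks axioms to remove until every justification is broken; returns the removal set. */
        static <A> Set<A> repair(Set<Set<A>> justifications, Set<A> protectedAxioms) {
            Set<A> removed = new LinkedHashSet<A>();
            while (!justifications.isEmpty()) {
                // count how often each removable axiom occurs across the remaining justifications
                Map<A, Integer> freq = new HashMap<A, Integer>();
                for (Set<A> j : justifications) {
                    for (A ax : j) {
                        if (!protectedAxioms.contains(ax)) {
                            Integer c = freq.get(ax);
                            freq.put(ax, c == null ? 1 : c + 1);
                        }
                    }
                }
                if (freq.isEmpty()) {
                    break; // every remaining axiom is protected; nothing more can be done
                }
                // pick the most frequent axiom ...
                A best = null;
                int bestCount = -1;
                for (Map.Entry<A, Integer> e : freq.entrySet()) {
                    if (e.getValue() > bestCount) {
                        best = e.getKey();
                        bestCount = e.getValue();
                    }
                }
                removed.add(best);
                // ... and drop every justification it hits
                for (Iterator<Set<A>> it = justifications.iterator(); it.hasNext();) {
                    if (it.next().contains(best)) {
                        it.remove();
                    }
                }
            }
            return removed;
        }
    }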
From: <lor...@us...> - 2012-03-01 02:50:21
Revision: 3602 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3602&view=rev Author: lorenz_b Date: 2012-03-01 02:50:14 +0000 (Thu, 01 Mar 2012) Log Message: ----------- Added workaround for problem in OWLAPI with user-defined datatypes. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:08:16 UTC (rev 3601) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-03-01 02:50:14 UTC (rev 3602) @@ -2,6 +2,8 @@ import java.io.BufferedInputStream; import java.io.BufferedOutputStream; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -47,7 +49,6 @@ import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLEntity; import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLLogicalAxiom; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; @@ -64,13 +65,18 @@ import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; import uk.ac.manchester.cs.owlapi.modularity.ModuleType; -import com.clarkparsia.modularity.IncrementalClassifier; import com.clarkparsia.modularity.ModularityUtils; import com.clarkparsia.owlapi.explanation.BlackBoxExplanation; import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; import com.clarkparsia.pellet.owlapiv3.PelletReasoner; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.RDFNode; +import com.hp.hpl.jena.rdf.model.Statement; +import com.hp.hpl.jena.rdf.model.StmtIterator; +import com.hp.hpl.jena.vocabulary.RDFS; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -108,7 +114,7 @@ private OWLOntology dbpediaOntology; - private String fileName = "dbpedia"; + private String fileName; private String diffFileName = "diff.owl"; public JustificationBasedCoherentOntologyExtractor() { @@ -117,7 +123,7 @@ } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } - dbpediaOntology = loadDBpediaOntology(); + dbpediaOntology = loadDBpediaOntologyOWLDL(); } static {PelletExplanation.setup();} @@ -147,7 +153,8 @@ e1.printStackTrace(); } - //only for debugging + /*only to avoid Pellet warnings during the process and this axioms are only removed from the ontology, + which is used during the debugging and not from the ontology which is always saved and returned finally*/ removedTransitiveAxioms = incoherentOntology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); incoherentOntology.getOWLOntologyManager().removeAxioms(incoherentOntology, removedTransitiveAxioms); @@ -181,12 +188,6 @@ } private OWLOntology computeCoherentOntologyRootBased(OWLOntology ontology) { - -// startTime = System.currentTimeMillis(); -// hermitReasoner = new Reasoner(incoherentOntology); -// 
hermitReasoner.classifyClasses(); -// logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - OWLOntologyManager man = incoherentOntology.getOWLOntologyManager(); factory = man.getOWLDataFactory(); // man.addOntologyChangeListener(reasoner); @@ -203,9 +204,9 @@ int derivedCnt = derivedUnsatClasses.size(); //if no roots are found we use all unsat classes - if(rootCnt == 0){ - unsatClasses = derivedUnsatClasses; - } + if(rootCnt == 0){ + unsatClasses = derivedUnsatClasses; + } // Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); int cnt = rootCnt + derivedCnt; @@ -333,6 +334,10 @@ unsatPropCnt = unsatObjectProperties.size(); } + if(unsatObjectProperties.isEmpty()){ + break; + } + //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); @@ -349,39 +354,42 @@ } private OWLOntology computeCoherentOntology(OWLOntology ontology) { + OWLOntologyManager man = incoherentOntology.getOWLOntologyManager(); + factory = man.getOWLDataFactory(); +// man.addOntologyChangeListener(reasoner); + //compute the unsatisfiable classes logger.info("Computing unsatisfiable classes..."); long startTime = System.currentTimeMillis(); + Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); - int cnt = unsatClasses.size(); - logger.info("Detected " + cnt + " unsatisfiable classes."); + logger.info("Detected " + unsatClasses.size() + " unsatisfiable classes."); //if the ontology is not incoherent we return it here if(unsatClasses.isEmpty()){ return incoherentOntology; } - //compute the logical modules for each unsatisfiable class - logger.info("Computing module for each unsatisfiable class..."); - startTime = System.currentTimeMillis(); - entity2ModuleMap = extractModules(unsatClasses); - logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); computeExplanations(unsatClasses); -// entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + if(computeParallel){ + computeExplanations(unsatObjectProperties); + } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(!unsatClasses.isEmpty()){// && !unsatObjectProperties.isEmpty()){ + int cnt = unsatClasses.size(); + if(computeParallel){ + cnt += unsatObjectProperties.size(); + } + + while(cnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); - //recompute the unsatisfiable classes - logger.info("Reclassifying..."); + logger.info("Computing unsatisfiable classes..."); startTime = System.currentTimeMillis(); - reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); @@ -391,30 +399,75 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if(cnt - unsatClasses.size() >= 10){ + if((!computeParallel && (cnt-unsatClasses.size()>= 5)) + || (computeParallel && (cnt-unsatClasses.size()+unsatObjectProperties.size())>=5)){ cnt = 
unsatClasses.size(); - save("log/" + fileName + "_" + cnt + "cls" + unsatObjectProperties.size() + "prop.owl"); + save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); + if(computeParallel){ + cnt += unsatObjectProperties.size(); + } } + if(unsatClasses.isEmpty() && (!computeParallel || (computeParallel && unsatObjectProperties.isEmpty()))){ + cnt = 0; + break; + } //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); - refillExplanations(unsatClasses, entity2Explanations); - //refillExplanations(unsatObjectProperties, entity2Explanations); + computeExplanations(unsatClasses); + if(computeParallel){ + computeExplanations(unsatObjectProperties); + } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); } - try { - incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); + entity2Explanations.clear(); + entity2ModuleMap.clear(); + + save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); + + + if(!computeParallel){ + unsatObjectProperties = getUnsatisfiableObjectProperties(); + int unsatPropCnt = unsatObjectProperties.size(); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + + logger.info("Recomputing explanations..."); + startTime = System.currentTimeMillis(); + computeExplanations(unsatObjectProperties); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + while(unsatPropCnt > 0){ + //we remove the most appropriate axiom from the ontology + removeAppropriateAxiom(); + + //recompute unsatisfiable object properties + unsatObjectProperties = getUnsatisfiableObjectProperties(); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + + //save + if((unsatPropCnt - unsatObjectProperties.size()) >= 5){ + save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatPropCnt + "prop.owl"); + unsatPropCnt = unsatObjectProperties.size(); + } + if(unsatObjectProperties.isEmpty()){ + break; + } + + //recompute explanations if necessary + logger.info("Recomputing explanations..."); + startTime = System.currentTimeMillis(); + computeExplanations(unsatObjectProperties); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + + System.gc(); + } } - System.out.println(incoherentOntology.getLogicalAxiomCount()); - return getOntologyWithAnnotations(incoherentOntology); + save("log/" + fileName + "_coherent.owl"); + + return ontology; } private OWLOntology computeCoherentOntology2(OWLOntology ontology) { @@ -473,16 +526,9 @@ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); } - try { - incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } - System.out.println(incoherentOntology.getLogicalAxiomCount()); + save("log/" + fileName + "_coherent.owl"); - return 
getOntologyWithAnnotations(incoherentOntology); + return ontology; } private void removeAppropriateAxiom(){ @@ -491,6 +537,12 @@ Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); //get a sorted list of entries with the highest axiom count first List<Entry<OWLAxiom, Integer>> sortedEntries = sort(axiom2CountMap); + logger.info("Candidates: " + sortedEntries.size()); + if(sortedEntries.size() >= 2){ + logger.info("First: " + sortedEntries.get(0) + "(" + getConfidence(sortedEntries.get(0).getKey()) + ")"); + logger.info("Second: " + sortedEntries.get(1) + "(" + getConfidence(sortedEntries.get(1).getKey()) + ")"); + } + //we remove the most frequent axiom from the ontology which is not contained in the original DBpedia ontology for(Entry<OWLAxiom, Integer> e : sortedEntries){ OWLAxiom axiom = e.getKey(); @@ -511,6 +563,8 @@ } private void save(String fileName){ + logger.info("Writing to disk..."); + long startTime = System.currentTimeMillis(); try { ontology.getOWLOntologyManager().saveOntology(ontology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); diffOntology.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); @@ -519,6 +573,7 @@ } catch (FileNotFoundException e) { e.printStackTrace(); } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); } private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(){ @@ -526,13 +581,20 @@ long startTime = System.currentTimeMillis(); SortedSet<OWLObjectProperty> properties = new TreeSet<OWLObjectProperty>(); OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); - for(OWLObjectProperty p : reasoner.getRootOntology().getObjectPropertiesInSignature()){ + for(OWLObjectProperty p : incoherentOntology.getObjectPropertiesInSignature()){ // boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectExactCardinality(1, p)); boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectSomeValuesFrom(p, factory.getOWLThing())); if(!satisfiable){ properties.add(p); } } + /* + * this method down't seem to work TODO ask Pellet developers why + for(OWLObjectPropertyExpression p : reasoner.getEquivalentObjectProperties(factory.getOWLBottomObjectProperty()).getEntitiesMinusBottom()){ + if(!p.isAnonymous()){ + properties.add(p.asOWLObjectProperty()); + } + }*/ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); return properties; } @@ -556,17 +618,6 @@ } } - private void refillExplanations(Set<? 
extends OWLEntity> unsatEntities, Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations){ - for(OWLEntity unsatClass : unsatEntities){ - Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatClass); - if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ - Set<Set<OWLAxiom>> newExplanations = computeExplanations(unsatClass, numberOfJustifications); - logger.info(unsatClass + ": " + newExplanations.size()); - entity2Explanations.put(unsatClass, newExplanations); - } - } - } - private Map<OWLAxiom, Integer> getAxiomFrequency(Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations){ Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); @@ -632,18 +683,6 @@ return null; } - private OWLOntology getOntologyWithAnnotations(OWLOntology ontologyWithOutAnnotations){ - logger.info("BEFORE: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); - OWLOntologyManager man = ontology.getOWLOntologyManager(); - for (Iterator<OWLLogicalAxiom> iterator = ontology.getLogicalAxioms().iterator(); iterator.hasNext();) { - OWLLogicalAxiom axiom = iterator.next(); - if(!ontologyWithOutAnnotations.containsAxiomIgnoreAnnotations(axiom)){ - man.removeAxiom(ontology, axiom); - } - }logger.info("AFTER: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); - return ontology; - } - private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity){ return computeExplanations(unsatEntity, numberOfJustifications); } @@ -675,7 +714,7 @@ private double getConfidence(OWLAxiom axiom){ Set<OWLAxiom> axiomsWithAnnotations = ontology.getAxiomsIgnoreAnnotations(axiom); - if(axiomsWithAnnotations.isEmpty()){ + if(axiomsWithAnnotations.isEmpty()){//this should never happen logger.info("Axiom with annotations not found: " + axiom); return 2; } @@ -768,6 +807,62 @@ return ontology; } + /** + * First try to clean up ontology with JENA as original ontology is in OWL Full because of some user-defined datatypes. + * We could either (1) return the rdfs:range triples of the properties with user-defined datatypes or (2) remove all triples about the property. 
+ * @return + */ + private OWLOntology loadDBpediaOntologyOWLDL() { + long startTime = System.currentTimeMillis(); + logger.info("Loading DBpedia reference ontology..."); + OWLOntology ontology = null; + try { + URL dbpediaURL = new URL("http://downloads.dbpedia.org/3.7/dbpedia_3.7.owl.bz2"); + InputStream is = dbpediaURL.openStream(); + is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); + Model model = ModelFactory.createDefaultModel(); + model.read(is, null); + //get all subjects where URI of RDFS:range starts with http://dbpedia.org/datatype/ + for(StmtIterator iter = model.listStatements(null, RDFS.range, (RDFNode)null); iter.hasNext();){ + Statement st = iter.next(); + if(st.getObject().asResource().getURI().startsWith("http://dbpedia.org/datatype/")){ + iter.remove(); + } + //solution 2 +// for(StmtIterator iter2 = model.listStatements(iter.next().getSubject(), null, (RDFNode)null); iter2.hasNext();){ +// iter2.remove(); +// } + } + + + + return convert(model); + + } catch (MalformedURLException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } catch (CompressorException e) { + e.printStackTrace(); + } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + return ontology; + } + + private OWLOntology convert(Model model) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + model.write(baos, "N-TRIPLE"); + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLOntology retOnt = null; + try { + retOnt = manager.loadOntologyFromOntologyDocument(bais); + } catch (OWLOntologyCreationException e) { + + } + return retOnt; + } + private Map<OWLEntity, OWLOntology> extractModules(Set<? extends OWLEntity> entities){ logger.info("Computing modules..."); long startTime = System.currentTimeMillis(); @@ -843,8 +938,6 @@ is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); } OWLOntology schema = man.loadOntologyFromOntologyDocument(is); - Set<OWLTransitiveObjectPropertyAxiom> removedAxioms = schema.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); - man.removeAxioms(schema, removedAxioms); System.out.println("...done."); JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); @@ -854,10 +947,9 @@ if(filename.indexOf('/') >= 0){ filename = filename.substring( filename.lastIndexOf('/')+1, filename.length() ); } - extractor.setFileName(filename); + OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); - man.addAxioms(coherentOntology, removedAxioms); System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
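The loadDBpediaOntologyOWLDL workaround above has two steps: strip the rdfs:range triples that point into the user-defined datatype namespace with Jena, then hand the cleaned model to the OWL API via an N-Triples round trip. A condensed sketch of the filtering step follows; compared with the diff it adds a literal/blank-node guard, since asResource() fails on literals and getURI() is null for anonymous resources.

    import com.hp.hpl.jena.rdf.model.Model;
    import com.hp.hpl.jena.rdf.model.RDFNode;
    import com.hp.hpl.jena.rdf.model.Statement;
    import com.hp.hpl.jena.rdf.model.StmtIterator;
    import com.hp.hpl.jena.vocabulary.RDFS;

    public class DatatypeRangeFilter {

        /** Drops rdfs:range statements whose object lies in the user-defined DBpedia datatype namespace. */
        static void dropUserDefinedDatatypeRanges(Model model) {
            for (StmtIterator iter = model.listStatements(null, RDFS.range, (RDFNode) null); iter.hasNext();) {
                Statement st = iter.next();
                RDFNode obj = st.getObject();
                if (obj.isURIResource()
                        && obj.asResource().getURI().startsWith("http://dbpedia.org/datatype/")) {
                    iter.remove(); // removal through the statement iterator is supported by Jena
                }
            }
        }
    }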
From: <lor...@us...> - 2012-02-29 08:08:26
Revision: 3601 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3601&view=rev Author: lorenz_b Date: 2012-02-29 08:08:16 +0000 (Wed, 29 Feb 2012) Log Message: ----------- Added optimizations. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:07:17 UTC (rev 3600) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:08:16 UTC (rev 3601) @@ -129,7 +129,7 @@ @Override public OWLOntology getCoherentOntology(OWLOntology ontology, boolean preferRoots){ -// ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); + ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
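The single change in r3601 re-enables merging the logical axioms of the DBpedia reference ontology into the ontology under repair, so that justifications can span both. In OWL API terms this is one call; a sketch with placeholder ontologies:

    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.model.OWLOntologyManager;

    public class MergeReferenceAxioms {

        /** Copies the reference ontology's logical axioms into the target (annotations are left behind). */
        static void mergeInto(OWLOntology target, OWLOntology reference) {
            OWLOntologyManager man = target.getOWLOntologyManager();
            man.addAxioms(target, reference.getLogicalAxioms());
        }
    }

Because removeAppropriateAxiom skips any axiom contained in the reference ontology, the merged axioms are protected: they contribute to justifications but are never candidates for removal.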
From: <lor...@us...> - 2012-02-29 08:07:28
Revision: 3600 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3600&view=rev Author: lorenz_b Date: 2012-02-29 08:07:17 +0000 (Wed, 29 Feb 2012) Log Message: ----------- Added optimizations. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:00:04 UTC (rev 3599) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:07:17 UTC (rev 3600) @@ -78,8 +78,8 @@ private static final String DIFF_ONTOLOGY_NAME = "diff.owl"; private int numberOfJustifications = 10; - private PelletReasoner propReasoner; - private IncrementalClassifier reasoner; + private PelletReasoner baseReasoner; + private PelletReasoner reasoner;//IncrementalClassifier reasoner; private Reasoner hermitReasoner; private OWLOntology incoherentOntology; @@ -155,8 +155,8 @@ factory = manager.getOWLDataFactory(); long startTime = System.currentTimeMillis(); - propReasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(incoherentOntology); - reasoner = new IncrementalClassifier(propReasoner); + baseReasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(incoherentOntology); + reasoner = baseReasoner;//new IncrementalClassifier(baseReasoner); reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -246,7 +246,7 @@ //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); - reasoner.classify(); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); // hermitReasoner.classifyClasses(); //Set<OWLClass> unsatClasses2 = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -381,7 +381,7 @@ //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); - reasoner.classify(); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); @@ -434,7 +434,7 @@ //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - PelletExplanation expGen = new PelletExplanation(reasoner.getReasoner()); + PelletExplanation expGen = new PelletExplanation(baseReasoner); Set<Set<OWLAxiom>> explanations; for(OWLClass unsatCls : unsatClasses){ explanations = expGen.getUnsatisfiableExplanations(unsatCls, numberOfJustifications); @@ -450,7 +450,7 @@ //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); - reasoner.classify(); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); This 
was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
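r3600 above swaps the Pellet IncrementalClassifier for a plain non-buffering PelletReasoner and reclassifies through the standard precomputeInferences call. The resulting setup, reduced to its essentials (ontology loading omitted):

    import java.util.Set;

    import org.semanticweb.owlapi.model.OWLClass;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.reasoner.InferenceType;

    import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
    import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;

    public class PelletClassification {

        static Set<OWLClass> unsatisfiableClasses(OWLOntology ontology) {
            // non-buffering: axiom removals become visible to the reasoner without an explicit flush,
            // which the repeated remove/reclassify loop in the extractor relies on
            PelletReasoner reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(ontology);
            reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
            return reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom();
        }
    }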
From: <lor...@us...> - 2012-02-29 08:00:11
Revision: 3599 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3599&view=rev Author: lorenz_b Date: 2012-02-29 08:00:04 +0000 (Wed, 29 Feb 2012) Log Message: ----------- Added optimizations. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 21:44:59 UTC (rev 3598) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:00:04 UTC (rev 3599) @@ -27,8 +27,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import openlink.util.MD5; - import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorStreamFactory; import org.apache.log4j.ConsoleAppender; @@ -58,6 +56,7 @@ import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.RemoveAxiom; import org.semanticweb.owlapi.reasoner.IllegalConfigurationException; +import org.semanticweb.owlapi.reasoner.InferenceType; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; @@ -70,6 +69,7 @@ import com.clarkparsia.owlapi.explanation.BlackBoxExplanation; import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; +import com.clarkparsia.pellet.owlapiv3.PelletReasoner; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -78,7 +78,7 @@ private static final String DIFF_ONTOLOGY_NAME = "diff.owl"; private int numberOfJustifications = 10; -// private PelletReasoner reasoner; + private PelletReasoner propReasoner; private IncrementalClassifier reasoner; private Reasoner hermitReasoner; @@ -129,7 +129,7 @@ @Override public OWLOntology getCoherentOntology(OWLOntology ontology, boolean preferRoots){ - ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); +// ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); @@ -155,12 +155,13 @@ factory = manager.getOWLDataFactory(); long startTime = System.currentTimeMillis(); - reasoner = new IncrementalClassifier(incoherentOntology); - reasoner.classify(); + propReasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(incoherentOntology); + reasoner = new IncrementalClassifier(propReasoner); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); //compute the unsatisfiable object properties and their corresponding modules - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Found unsatisfiable object properties: " + unsatObjectProperties.size()); if(computeParallel){ entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); @@ -272,7 +273,7 @@ //recompute 
unsatisfiable object properties // if(computeParallel){ - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); // } @@ -286,6 +287,11 @@ cnt += unsatPropCnt; } } + if(unsatClasses.isEmpty() && (!computeParallel || (computeParallel && unsatObjectProperties.isEmpty()))){ + cnt = 0; + unsatPropCnt = unsatObjectProperties.size(); + break; + } //recompute explanations if necessary logger.info("Recomputing explanations..."); @@ -304,10 +310,10 @@ save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); if(!computeParallel){ - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); - entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); +// entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); @@ -317,18 +323,12 @@ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); - //recompute the unsatisfiable classes - logger.info("Reclassifying..."); - startTime = System.currentTimeMillis(); - reasoner.classify(); - logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - //recompute unsatisfiable object properties - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if((unsatPropCnt - unsatObjectProperties.size()) >= 1){ + if((unsatPropCnt - unsatObjectProperties.size()) >= 5){ save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); unsatPropCnt = unsatObjectProperties.size(); } @@ -387,7 +387,7 @@ logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); //recompute unsatisfiable object properties - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save @@ -521,7 +521,7 @@ } } - private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(IncrementalClassifier reasoner){ + private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(){ logger.info("Computing unsatisfiable object properties..."); long startTime = System.currentTimeMillis(); SortedSet<OWLObjectProperty> properties = new TreeSet<OWLObjectProperty>(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-02-28 21:45:06
Revision: 3598 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3598&view=rev Author: lorenz_b Date: 2012-02-28 21:44:59 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Further debugging for eval. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 15:56:18 UTC (rev 3597) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 21:44:59 UTC (rev 3598) @@ -238,7 +238,7 @@ cnt += unsatPropCnt; } - while(cnt >= 0){ + while(cnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -301,6 +301,8 @@ entity2Explanations.clear(); entity2ModuleMap.clear(); + save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); + if(!computeParallel){ unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); @@ -341,15 +343,9 @@ } } - try { - incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } + save("log/" + fileName + "_coherent.owl"); - return getOntologyWithAnnotations(incoherentOntology); + return ontology; } private OWLOntology computeCoherentOntology(OWLOntology ontology) { @@ -501,6 +497,10 @@ if(!dbpediaOntology.containsAxiomIgnoreAnnotations(axiom)){ logger.info("Removing axiom " + axiom + "."); manager.removeAxiom(incoherentOntology, axiom); + //remove the axiom also from the loaded ontology + OWLAxiom originalAnnotatedAxiom = ontology.getAxiomsIgnoreAnnotations(axiom).iterator().next(); + ontology.getOWLOntologyManager().removeAxiom(ontology, originalAnnotatedAxiom); + manager.addAxiom(diffOntology, axiom); manager.applyChange(new RemoveAxiom(incoherentOntology, axiom)); removeFromExplanations(entity2Explanations, axiom); @@ -511,10 +511,9 @@ } private void save(String fileName){ - OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); try { - toSave.getOWLOntologyManager().saveOntology(toSave, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); - toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); + ontology.getOWLOntologyManager().saveOntology(ontology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); + diffOntology.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { @@ -634,13 +633,14 @@ } private OWLOntology getOntologyWithAnnotations(OWLOntology ontologyWithOutAnnotations){ + logger.info("BEFORE: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); 
OWLOntologyManager man = ontology.getOWLOntologyManager(); for (Iterator<OWLLogicalAxiom> iterator = ontology.getLogicalAxioms().iterator(); iterator.hasNext();) { OWLLogicalAxiom axiom = iterator.next(); if(!ontologyWithOutAnnotations.containsAxiomIgnoreAnnotations(axiom)){ man.removeAxiom(ontology, axiom); } - } + }logger.info("AFTER: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); return ontology; } @@ -677,9 +677,7 @@ Set<OWLAxiom> axiomsWithAnnotations = ontology.getAxiomsIgnoreAnnotations(axiom); if(axiomsWithAnnotations.isEmpty()){ logger.info("Axiom with annotations not found: " + axiom); - logger.info("Ontology contains axiom: " + incoherentOntology.containsAxiomIgnoreAnnotations(axiom)); - logger.info("Original loaded ontology contains axiom: " + ontology.containsAxiomIgnoreAnnotations(axiom)); - System.out.println(ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); + return 2; } OWLAxiom axiomWithAnnotations = axiomsWithAnnotations.iterator().next(); Set<OWLAnnotation> annotations = axiomWithAnnotations.getAnnotations(confidenceProperty); @@ -694,38 +692,46 @@ public OWLOntology getModule(OWLEntity entity){ - OWLOntology module = entity2ModuleMap.get(entity); - new File("log").mkdir(); - if(module == null){ - md5.reset(); - md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + entity.toStringID()).getBytes()); - String hash = MD5.asHex(md5.digest()); - String filename = "log/" + hash + ".owl"; - File file = new File(filename); - boolean load = false; - if(load){//file.exists()){ - module = loadModule(file); - } else { - try { - module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } - /* - module = OntologyUtils.getOntologyFromAxioms( - ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); - - try { - manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - }*/ - } - - //entity2ModuleMap.put(entity, module); + OWLOntology module = null; + try { + module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); } + +// OWLOntology module = entity2ModuleMap.get(entity); +// new File("log").mkdir(); +// if(module == null){ +// md5.reset(); +// md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + entity.toStringID()).getBytes()); +// String hash = MD5.asHex(md5.digest()); +// String filename = "log/" + hash + ".owl"; +// File file = new File(filename); +// boolean load = false; +// if(load){//file.exists()){ +// module = loadModule(file); +// } else { +// try { +// module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); +// } catch (OWLOntologyCreationException e) { +// e.printStackTrace(); +// } +// /* +// module = OntologyUtils.getOntologyFromAxioms( +// 
ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); +// +// try { +// manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); +// } catch (OWLOntologyStorageException e) { +// e.printStackTrace(); +// } catch (FileNotFoundException e) { +// e.printStackTrace(); +// }*/ +// } +// +// //entity2ModuleMap.put(entity, module); +// } return module; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
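A side note on the module extraction that r3598 falls back to after disabling the MD5-keyed disk cache: for each unsatisfiable entity a TOP_OF_BOT locality-based module is extracted and turned into a fresh ontology, so the Pellet explanation generator only ever sees axioms relevant to that entity. A hedged sketch of the idiom follows; the helper name is illustrative, and the ModularityUtils import path is assumed from Pellet's modularity module (the diff uses the class unqualified).

import java.util.Collections;
import java.util.Set;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;

import uk.ac.manchester.cs.owlapi.modularity.ModuleType;

import com.clarkparsia.modularity.ModularityUtils;

public class ModuleSketch {
    // Returns a fresh ontology holding the TOP_OF_BOT module for one entity;
    // explanation generation then runs against this much smaller ontology.
    public static OWLOntology moduleFor(OWLOntology source, OWLEntity entity)
            throws OWLOntologyCreationException {
        Set<OWLAxiom> moduleAxioms = ModularityUtils.extractModule(
                source, Collections.singleton(entity), ModuleType.TOP_OF_BOT);
        return OWLManager.createOWLOntologyManager().createOntology(moduleAxioms);
    }
}

Recomputing the module on every call costs time, but it sidesteps the stale-cache problem once axioms start being removed from the ontology between calls.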
From: <lor...@us...> - 2012-02-28 15:56:27
Revision: 3597 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3597&view=rev Author: lorenz_b Date: 2012-02-28 15:56:18 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Many modifications to work for eval. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 15:46:01 UTC (rev 3596) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 15:56:18 UTC (rev 3597) @@ -14,7 +14,9 @@ import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -24,25 +26,29 @@ import java.util.TreeSet; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.logging.Level; -import java.util.logging.Logger; import openlink.util.MD5; import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorStreamFactory; -import org.mindswap.pellet.RBox; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.FileAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.SimpleLayout; import org.semanticweb.HermiT.Configuration; import org.semanticweb.HermiT.Reasoner; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLLiteral; import org.semanticweb.owlapi.model.OWLLogicalAxiom; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; @@ -56,6 +62,7 @@ import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; import uk.ac.manchester.cs.owlapi.modularity.ModuleType; import com.clarkparsia.modularity.IncrementalClassifier; @@ -63,7 +70,6 @@ import com.clarkparsia.owlapi.explanation.BlackBoxExplanation; import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; -import com.clarkparsia.owlapiv3.OntologyUtils; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -78,7 +84,7 @@ private OWLOntology incoherentOntology; private OWLOntology ontology; - private OWLDataFactory factory; + private OWLDataFactory factory = new OWLDataFactoryImpl();; //we store the removed axioms in it private OWLOntology diffOntology; @@ -86,6 +92,8 @@ private Map<OWLEntity, 
OWLOntology> entity2ModuleMap = new HashMap<OWLEntity, OWLOntology>(); private Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); private Map<OWLEntity, PelletExplanation> entity2ExpGen = new HashMap<OWLEntity, PelletExplanation>(); + private Set<OWLEntity> entitiesWithLessExplanations = new HashSet<OWLEntity>(); + private OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); MessageDigest md5; @@ -96,6 +104,8 @@ //whether to debug classes and properties in parallel private boolean computeParallel = false; + private OWLAnnotationProperty confidenceProperty; + private OWLOntology dbpediaOntology; private String fileName = "dbpedia"; @@ -119,6 +129,8 @@ @Override public OWLOntology getCoherentOntology(OWLOntology ontology, boolean preferRoots){ + ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); + this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); @@ -218,7 +230,7 @@ startTime = System.currentTimeMillis(); computeExplanations(unsatClasses); if(computeParallel){ - entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + computeExplanations(unsatObjectProperties); } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -235,12 +247,12 @@ startTime = System.currentTimeMillis(); reasoner.classify(); // hermitReasoner.classifyClasses(); -// unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + //Set<OWLClass> unsatClasses2 = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Computing root/derived unsatisfiable classes..."); startTime = System.currentTimeMillis(); - rootFinder = new StructureBasedRootClassFinder(reasoner); + rootFinder = new StructureBasedRootClassFinder(reasoner, this); unsatClasses = rootFinder.getRootUnsatisfiableClasses(); derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); rootCnt = unsatClasses.size(); @@ -253,19 +265,19 @@ } logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); - + if(unsatClasses.isEmpty()){ unsatClasses = derivedUnsatClasses; } //recompute unsatisfiable object properties - if(computeParallel){ + // if(computeParallel){ unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); - } + // } //save - if(cnt - (rootCnt+derivedCnt) >= 1 || (unsatPropCnt - unsatObjectProperties.size()) >= 1){ + if(cnt - (rootCnt+derivedCnt) >= 5 || (unsatPropCnt - unsatObjectProperties.size()) >= 5){ cnt = rootCnt + derivedCnt; save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); cnt = rootCnt + derivedCnt; @@ -362,11 +374,11 @@ //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - entity2Explanations.putAll(getInitialExplanations(unsatClasses)); - entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + computeExplanations(unsatClasses); +// entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(!unsatClasses.isEmpty() && !unsatObjectProperties.isEmpty()){ + while(!unsatClasses.isEmpty()){// && !unsatObjectProperties.isEmpty()){ //we remove the most appropriate 
axiom from the ontology removeAppropriateAxiom(); @@ -392,7 +404,7 @@ logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); refillExplanations(unsatClasses, entity2Explanations); - refillExplanations(unsatObjectProperties, entity2Explanations); + //refillExplanations(unsatObjectProperties, entity2Explanations); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); @@ -478,10 +490,11 @@ } private void removeAppropriateAxiom(){ + logger.info("Searching for appropriate axiom to remove..."); //get frequency for each axiom Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); //get a sorted list of entries with the highest axiom count first - List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); + List<Entry<OWLAxiom, Integer>> sortedEntries = sort(axiom2CountMap); //we remove the most frequent axiom from the ontology which is not contained in the original DBpedia ontology for(Entry<OWLAxiom, Integer> e : sortedEntries){ OWLAxiom axiom = e.getKey(); @@ -500,7 +513,7 @@ private void save(String fileName){ OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); try { - toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); + toSave.getOWLOntologyManager().saveOntology(toSave, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); @@ -549,6 +562,7 @@ Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatClass); if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ Set<Set<OWLAxiom>> newExplanations = computeExplanations(unsatClass, numberOfJustifications); + logger.info(unsatClass + ": " + newExplanations.size()); entity2Explanations.put(unsatClass, newExplanations); } } @@ -576,17 +590,20 @@ private void computeExplanations(Set<? extends OWLEntity> unsatEntities){ ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); - List<Future<Void>> list = new ArrayList<Future<Void>>(); for(final OWLEntity unsatEntity : unsatEntities){ Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatEntity); - if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ + if(precomputedExplanations == null || (!entitiesWithLessExplanations.contains(unsatEntity) && precomputedExplanations.size() < numberOfJustifications)){ executor.execute(new Runnable(){ @Override public void run() { Set<Set<OWLAxiom>> explanations = computeExplanations(unsatEntity); + logger.info("Computed "+ explanations.size() + " explanations for " + unsatEntity); entity2Explanations.put(unsatEntity, explanations); + if(explanations.size() < numberOfJustifications){ + entitiesWithLessExplanations.add(unsatEntity); + } } }); @@ -601,22 +618,10 @@ } - private Map<OWLEntity, Set<Set<OWLAxiom>>> getInitialExplanations(Set<? 
extends OWLEntity> unsatEntities){ - Map<OWLEntity, Set<Set<OWLAxiom>>> cls2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); - - Set<Set<OWLAxiom>> explanations; - for(OWLEntity unsatEntity : unsatEntities){ - explanations = computeExplanations(unsatEntity); - cls2Explanations.put(unsatEntity, explanations); - } - - return cls2Explanations; - } - private OWLOntology getOntologyWithoutAnnotations(OWLOntology ontology){ try { - OWLOntologyManager man = ontology.getOWLOntologyManager(); - OWLOntology ontologyWithoutAnnotations = ontology.getOWLOntologyManager().createOntology(); + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLOntology ontologyWithoutAnnotations = man.createOntology(); for(OWLAxiom ax : ontology.getLogicalAxioms()){ man.addAxiom(ontologyWithoutAnnotations, ax.getAxiomWithoutAnnotations()); } @@ -640,7 +645,6 @@ } private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity){ - logger.info(unsatEntity); return computeExplanations(unsatEntity, numberOfJustifications); } @@ -656,10 +660,10 @@ private PelletExplanation getExplanationGenerator(OWLEntity entity){ PelletExplanation expGen = entity2ExpGen.get(entity); - if(expGen == null){ +// if(expGen == null){ expGen = new PelletExplanation(PelletReasonerFactory.getInstance().createNonBufferingReasoner(getModule(entity))); - entity2ExpGen.put(entity, expGen); - } +// entity2ExpGen.put(entity, expGen); +// } return expGen; } @@ -668,42 +672,27 @@ HSTExplanationGenerator expGen = new HSTExplanationGenerator(singleExpGen); return expGen.getExplanations(unsatClass, limit); } + + private double getConfidence(OWLAxiom axiom){ + Set<OWLAxiom> axiomsWithAnnotations = ontology.getAxiomsIgnoreAnnotations(axiom); + if(axiomsWithAnnotations.isEmpty()){ + logger.info("Axiom with annotations not found: " + axiom); + logger.info("Ontology contains axiom: " + incoherentOntology.containsAxiomIgnoreAnnotations(axiom)); + logger.info("Original loaded ontology contains axiom: " + ontology.containsAxiomIgnoreAnnotations(axiom)); + System.out.println(ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); + } + OWLAxiom axiomWithAnnotations = axiomsWithAnnotations.iterator().next(); + Set<OWLAnnotation> annotations = axiomWithAnnotations.getAnnotations(confidenceProperty); + if(!annotations.isEmpty()){ + OWLAnnotation anno = annotations.iterator().next(); + OWLLiteral literal = (OWLLiteral) anno.getValue(); + return literal.parseDouble(); + } + return 2; + + } -// private Set<Set<OWLAxiom>> computeExplanationsBlackbox(OWLClass unsatClass, int limit){ -// BlackBoxExplanation b = new BlackBoxExplanation(incoherentOntology, reasonerFactory, hermitReasoner) -// MultipleExplanationGenerator expGen = new HSTExplanationGenerator(b); -// PelletExplanation expGen = new PelletExplanation(getModule(unsatClass)); -// return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); -// } -// private OWLOntology getModule(OWLClass cls){ -// OWLOntology module = cls2ModuleMap.get(cls); -// new File("log").mkdir(); -// if(module == null){ -// md5.reset(); -// md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + cls.toStringID()).getBytes()); -// String hash = MD5.asHex(md5.digest()); -// String filename = "log/" + hash + ".owl"; -// File file = new File(filename); -// if(file.exists()){ -// module = loadModule(file); -// } else { -// module = OntologyUtils.getOntologyFromAxioms( -// 
ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); -// try { -// manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); -// } catch (OWLOntologyStorageException e) { -// e.printStackTrace(); -// } catch (FileNotFoundException e) { -// e.printStackTrace(); -// } -// } -// -// cls2ModuleMap.put(cls, module); -// } -// return module; -// } - public OWLOntology getModule(OWLEntity entity){ OWLOntology module = entity2ModuleMap.get(entity); new File("log").mkdir(); @@ -713,21 +702,29 @@ String hash = MD5.asHex(md5.digest()); String filename = "log/" + hash + ".owl"; File file = new File(filename); - if(file.exists()){ + boolean load = false; + if(load){//file.exists()){ module = loadModule(file); } else { + try { + module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } + /* module = OntologyUtils.getOntologyFromAxioms( ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); + try { manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); - } + }*/ } - entity2ModuleMap.put(entity, module); + //entity2ModuleMap.put(entity, module); } return module; } @@ -765,24 +762,6 @@ return ontology; } -// private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ -// Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); -// for(OWLClass cls : classes){ -// OWLOntology module = getModule(cls); -// cls2ModuleMap.put(cls, module); -// } -// return cls2ModuleMap; -// } -// -// private Map<OWLObjectProperty, OWLOntology> extractModules(Set<OWLObjectProperty> objectProperties){ -// Map<OWLObjectProperty, OWLOntology> prop2ModuleMap = new HashMap<OWLObjectProperty, OWLOntology>(); -// for(OWLObjectProperty prop : objectProperties){ -// OWLOntology module = getModule(prop); -// prop2ModuleMap.put(prop, module); -// } -// return prop2ModuleMap; -// } - private Map<OWLEntity, OWLOntology> extractModules(Set<? 
extends OWLEntity> entities){ logger.info("Computing modules..."); long startTime = System.currentTimeMillis(); @@ -803,19 +782,54 @@ public void setComputeParallel(boolean computeParallel) { this.computeParallel = computeParallel; } + + public void setConfidencePropertyIRI(String iri){ + this.confidenceProperty = factory.getOWLAnnotationProperty(IRI.create(iri)); + } + private List<Entry<OWLAxiom, Integer>> sort(Map<OWLAxiom, Integer> map){ + List<Entry<OWLAxiom, Integer>> entries = new ArrayList<Entry<OWLAxiom, Integer>>(map.entrySet()); + Collections.sort(entries, new Comparator<Entry<OWLAxiom, Integer>>() { + + @Override + public int compare(Entry<OWLAxiom, Integer> o1, Entry<OWLAxiom, Integer> o2) { + int cmp = o2.getValue().compareTo(o1.getValue()); + //use as tie breaker the confidence value + if(cmp == 0){ + double conf1 = getConfidence(o1.getKey()); + double conf2 = getConfidence(o2.getKey()); + double diff = conf1-conf2; + if(diff > 0){ + return 1; + } else if(diff < 0){ + return -1; + } else { + return 0; + } +// return Double.compare(conf2, conf1); + } + return cmp; + } + }); + return entries; + } + public static void main(String[] args) throws Exception{ - Logger.getLogger(RBox.class.getName()).setLevel(Level.OFF); + Logger.getRootLogger().setLevel(Level.INFO); + Logger.getRootLogger().removeAllAppenders(); + Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout())); + Logger.getRootLogger().addAppender(new FileAppender(new SimpleLayout(), "log/out.log")); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - if(args.length != 4){ - System.out.println("USAGE: JustificationBasedCoherentOntologyExtractor <incoherent.owl> <numberOfJustifcations> <preferRootClasses(true|false)> <computeParallel(true|false)>"); + if(args.length != 5){ + System.out.println("USAGE: JustificationBasedCoherentOntologyExtractor <incoherent.owl> <confidencePropertyIRI> <numberOfJustifcations> <preferRootClasses(true|false)> <computeParallel(true|false)>"); System.exit(0); } String filename = args[0]; - int numberOfJustifications = Integer.parseInt(args[1]); - boolean preferRoots = Boolean.valueOf(args[2]); - boolean computeParallel = Boolean.valueOf(args[3]); + String confidenceIRI = args[1]; + int numberOfJustifications = Integer.parseInt(args[2]); + boolean preferRoots = Boolean.valueOf(args[3]); + boolean computeParallel = Boolean.valueOf(args[4]); System.out.println("Loading ontology..."); InputStream is = new BufferedInputStream(new FileInputStream(filename)); @@ -823,34 +837,22 @@ is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); } OWLOntology schema = man.loadOntologyFromOntologyDocument(is); - man.removeAxioms(schema, schema.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY)); - -// OWLOntology cleaned = man.createOntology(IRI.create("http://dbpedia_cleaned.owl")); -// man.addAxioms(cleaned, schema.getLogicalAxioms()); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.REFLEXIVE_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.IRREFLEXIVE_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.SYMMETRIC_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.ASYMMETRIC_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.FUNCTIONAL_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.INVERSE_FUNCTIONAL_OBJECT_PROPERTY)); -// 
man.saveOntology(cleaned, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(file.getParent() + "/cleaned.owl"))); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("log/dbpedia_95.owl")); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); -// System.out.println(schema.getLogicalAxiomCount()); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("log/dbpedia_coherent.owl")); -// System.out.println(schema.getLogicalAxiomCount()); + Set<OWLTransitiveObjectPropertyAxiom> removedAxioms = schema.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); + man.removeAxioms(schema, removedAxioms); System.out.println("...done."); JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); extractor.setNumberOfJustifications(numberOfJustifications); extractor.setComputeParallel(computeParallel); + extractor.setConfidencePropertyIRI(confidenceIRI); if(filename.indexOf('/') >= 0){ filename = filename.substring( filename.lastIndexOf('/')+1, filename.length() ); } extractor.setFileName(filename); OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); + man.addAxioms(coherentOntology, removedAxioms); + System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
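The most consequential piece of r3597 is the new removal order: axioms are ranked by how many justifications they occur in, and ties are broken by the confidence annotation now supplied via the <confidencePropertyIRI> command-line argument, so that among equally frequent axioms the least confident one is removed first. A condensed sketch of that comparator is below; the conf map stands in for the diff's getConfidence(), which parses the annotation literal from the original ontology.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.semanticweb.owlapi.model.OWLAxiom;

public class RemovalOrderSketch {
    // freq: number of justifications each axiom appears in;
    // conf: learned confidence per axiom (higher means more trusted).
    public static List<Entry<OWLAxiom, Integer>> rankForRemoval(
            Map<OWLAxiom, Integer> freq, final Map<OWLAxiom, Double> conf) {
        List<Entry<OWLAxiom, Integer>> entries =
                new ArrayList<Entry<OWLAxiom, Integer>>(freq.entrySet());
        Collections.sort(entries, new Comparator<Entry<OWLAxiom, Integer>>() {
            @Override
            public int compare(Entry<OWLAxiom, Integer> o1, Entry<OWLAxiom, Integer> o2) {
                int cmp = o2.getValue().compareTo(o1.getValue()); // most frequent first
                if (cmp == 0) {
                    // tie breaker: lower confidence sorts first, so it is removed first
                    return Double.compare(conf.get(o1.getKey()), conf.get(o2.getKey()));
                }
                return cmp;
            }
        });
        return entries;
    }
}

Worth noting: the diff's getConfidence() returns 2 when no annotation is found, which, assuming real confidence values stay at or below 1, pushes unannotated axioms to the back of any tie.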
From: <lor...@us...> - 2012-02-28 15:46:10
Revision: 3596 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3596&view=rev Author: lorenz_b Date: 2012-02-28 15:46:01 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Added property coherency check into script. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-28 15:32:59 UTC (rev 3595) +++ trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-28 15:46:01 UTC (rev 3596) @@ -12,6 +12,8 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.SortedSet; +import java.util.TreeSet; import java.util.Map.Entry; import java.util.Set; @@ -27,6 +29,8 @@ import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; @@ -40,18 +44,22 @@ private static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(JustificationBasedCoherentOntologyExtractor.class); private double stepSize = 0.001; - private static final int ALLOWED_UNSATISFIABLE_CLASSES = 5; + private int allowedUnsatClasses = 5; + private int allowedUnsatProperties = 5; private OWLOntologyManager manager; private OWLOntology coherentOntology; + private OWLDataFactory factory; private IncrementalClassifier reasoner; public GreedyCohaerencyExtractor() { // TODO Auto-generated constructor stub } - public OWLOntology getCoherentOntology(OWLOntology ontology, String target, double stepSize) throws OWLOntologyCreationException{ + public OWLOntology getCoherentOntology(OWLOntology ontology, String target, double stepSize, int allowedUnsatClasses, int allowedUnsatProperties) throws OWLOntologyCreationException{ stepSize = stepSize/100; + this.allowedUnsatClasses = allowedUnsatClasses; + this.allowedUnsatProperties = allowedUnsatProperties; BidiMap<AxiomType<? extends OWLAxiom>, Integer> axiomType2CountMap = getAxiomTypeCount(ontology); Map<AxiomType<? extends OWLAxiom>, List<OWLAxiom>> axiomType2AxiomsMap = new HashMap<AxiomType<? extends OWLAxiom>, List<OWLAxiom>>(); @@ -73,6 +81,7 @@ manager = OWLManager.createOWLOntologyManager(); + factory = manager.getOWLDataFactory(); coherentOntology = manager.createOntology(); reasoner = new IncrementalClassifier(coherentOntology); @@ -92,7 +101,7 @@ Set<OWLAxiom> toAdd = new HashSet<OWLAxiom>(axiomType2AxiomsMap.get(type[i]).subList(0, x)); manager.addAxioms(coherentOntology, toAdd); axiomType2AxiomsMap.get(type[i]).removeAll(toAdd); - isCoherent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; + isCoherent = isCoherent(); if(!isCoherent){ manager.removeAxioms(coherentOntology, toAdd); logger.info("Incoherency detected. 
Undoing changes."); @@ -124,14 +133,35 @@ return coherentOntology; } + private boolean isCoherent(){ + return (reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= allowedUnsatClasses) + && (getUnsatisfiableObjectProperties(reasoner).size() <= allowedUnsatProperties); + } + + private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(IncrementalClassifier reasoner){ + logger.info("Computing unsatisfiable object properties..."); + long startTime = System.currentTimeMillis(); + SortedSet<OWLObjectProperty> properties = new TreeSet<OWLObjectProperty>(); + OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); + for(OWLObjectProperty p : reasoner.getRootOntology().getObjectPropertiesInSignature()){ +// boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectExactCardinality(1, p)); + boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectSomeValuesFrom(p, factory.getOWLThing())); + if(!satisfiable){ + properties.add(p); + } + } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + return properties; + } + private Set<OWLAxiom> addAxioms(List<OWLAxiom> axioms){ Set<OWLAxiom> addedAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> axiomSet = new HashSet<OWLAxiom>(axioms); manager.addAxioms(coherentOntology, axiomSet); reasoner.classify(); - boolean isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; - if(!isCohaerent){ + boolean isCoherent = isCoherent(); + if(!isCoherent){ System.out.println("Incohaerency detected. Splitting..."); manager.removeAxioms(coherentOntology, axiomSet); if(axioms.size() == 1){ @@ -154,8 +184,8 @@ return addedAxioms; } - public OWLOntology getCoherentOntology(OWLReasoner reasoner, String target, double stepSize) throws OWLOntologyCreationException{ - return getCoherentOntology(reasoner.getRootOntology(), target, stepSize); + public OWLOntology getCoherentOntology(OWLReasoner reasoner, String target, double stepSize, int allowedUnsatClasses, int allowedUnsatProperties) throws OWLOntologyCreationException{ + return getCoherentOntology(reasoner.getRootOntology(), target, stepSize, allowedUnsatClasses, allowedUnsatProperties); } private BidiMap<AxiomType<? 
extends OWLAxiom>, Integer> getAxiomTypeCount(OWLOntology ontology){ @@ -178,13 +208,15 @@ Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout())); Logger.getRootLogger().addAppender(new FileAppender(new SimpleLayout(), "log/greedy_out.log")); - if(args.length != 3){ - System.out.println("USAGE: GreedyCoherencyExtractor <incoherent.owl> <target.owl> <stepsizeInPercent>"); + if(args.length != 5){ + System.out.println("USAGE: GreedyCoherencyExtractor <incoherent.owl> <target.owl> <stepsizeInPercent> <nrOfallowedUnsatClasses> <nrOfallowedUnsatProperties>"); System.exit(0); } String filename = args[0]; String target = args[1]; double stepSize = Double.parseDouble(args[2]); + int nrOfallowedUnsatClasses = Integer.parseInt(args[3]); + int nrOfallowedUnsatProperties = Integer.parseInt(args[4]); System.out.println("Loading ontology..."); InputStream is = new BufferedInputStream(new FileInputStream(filename)); @@ -197,7 +229,7 @@ System.out.println("...done."); GreedyCohaerencyExtractor ge = new GreedyCohaerencyExtractor(); - ge.getCoherentOntology(schema, target, stepSize); + ge.getCoherentOntology(schema, target, stepSize, nrOfallowedUnsatClasses, nrOfallowedUnsatProperties); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
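For context on the property coherency check r3596 adds: an object property p counts as unsatisfiable when the class expression (p some Thing) is unsatisfiable, i.e. no individual could possibly have a p-successor. The diff's helper, reduced to its core (the method and class names are illustrative):

import java.util.SortedSet;
import java.util.TreeSet;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.reasoner.OWLReasoner;

public class PropertyCoherencySketch {
    public static SortedSet<OWLObjectProperty> unsatisfiableProperties(OWLReasoner reasoner) {
        OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory();
        SortedSet<OWLObjectProperty> unsat = new TreeSet<OWLObjectProperty>();
        for (OWLObjectProperty p : reasoner.getRootOntology().getObjectPropertiesInSignature()) {
            // p is unsatisfiable iff 'p some Thing' can have no instance
            if (!reasoner.isSatisfiable(f.getOWLObjectSomeValuesFrom(p, f.getOWLThing()))) {
                unsat.add(p);
            }
        }
        return unsat;
    }
}

An alternative encoding via an exact-cardinality-1 restriction is left commented out in the diff; the someValuesFrom form is the conventional one.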
From: <seb...@us...> - 2012-02-28 15:33:09
Revision: 3595 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3595&view=rev Author: sebastianwtr Date: 2012-02-28 15:32:59 +0000 (Tue, 28 Feb 2012) Log Message: ----------- [tbsl exploration] fixed bug in generating Hypothesis Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java 2012-02-28 13:39:21 UTC (rev 3594) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java 2012-02-28 15:32:59 UTC (rev 3595) @@ -78,6 +78,7 @@ int anzahl = 1; for(ArrayList<Hypothesis> x : hypothesen){ System.out.println("\nSet of Hypothesen"+anzahl+":"); + anzahl+=1; for ( Hypothesis z : x){ z.printAll(); } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java 2012-02-28 13:39:21 UTC (rev 3594) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java 2012-02-28 15:32:59 UTC (rev 3595) @@ -32,13 +32,13 @@ Set<BasicQueryTemplate> querytemps = btemplator.buildBasicQueries(question); for (BasicQueryTemplate bqt : querytemps) { ArrayList<ArrayList<String>> condition = new ArrayList<ArrayList<String>>(); - ArrayList<ArrayList<Hypothesis>> hypotesen = new ArrayList<ArrayList<Hypothesis>>(); + //ArrayList<ArrayList<Hypothesis>> hypotesen = new ArrayList<ArrayList<Hypothesis>>(); String selectTerm = ""; String having= ""; String filter= ""; String OrderBy= ""; String limit= ""; - String condition_String = ""; + //String condition_String = ""; boolean addTemplate=true; try{ @@ -49,23 +49,24 @@ addTemplate=false; } - ArrayList<String> temp_array = new ArrayList<String>(); + //ArrayList<String> temp_array = new ArrayList<String>(); try{ - for(Path conditions1: bqt.getConditions()) condition_String=condition_String+(conditions1.toString())+"."; for(Path conditions1: bqt.getConditions()) { - temp_array.clear(); + ArrayList<String> temp_array = new ArrayList<String>(); String[] tmp_array = conditions1.toString().split(" -- "); for(String s: tmp_array){ + //System.out.println(s); temp_array.add(s); } condition.add(temp_array); - } - + + } } catch (Exception e){ - condition_String=""; + //condition_String=""; addTemplate=false; } + try{ for(SPARQL_Filter tmp : bqt.getFilters()) filter=filter+tmp+" "; @@ -108,17 +109,20 @@ if(addTemplate!=false){ + + /* + * SLOT_title: PROPERTY {title,name,label} mitfuehren + */ //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Template template = new Template(condition, having, filter, selectTerm,OrderBy, limit); //TODO: Iterate over slots ArrayList<Hypothesis> list_of_hypothesis = new ArrayList<Hypothesis>(); for(Slot slot : bqt.getSlots()){ - - if(slot.toString().contains("USPEC")){ + 
if(slot.toString().contains("UNSPEC")){ String tmp= slot.toString().replace(" UNSPEC {", ""); tmp=tmp.replace("}",""); String[] tmp_array = tmp.split(":"); - Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "USPEC", 0); + Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "UNSPEC", 0); list_of_hypothesis.add(tmp_hypothesis); } if(slot.toString().contains("PROPERTY")){ @@ -138,7 +142,6 @@ } } ArrayList<ArrayList<Hypothesis>> final_list_set_hypothesis = new ArrayList<ArrayList<Hypothesis>>(); - //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% for(Hypothesis x : list_of_hypothesis){ @@ -152,12 +155,17 @@ } for(String s : result){ ArrayList<Hypothesis> new_list = new ArrayList<Hypothesis>(); - new_list=list_of_hypothesis; - for(Hypothesis z : new_list){ - if(z.getUri().equals(x.getUri())){ - z.setUri(s); - z.setRank(1); + + //String variable, String uri, String type, float rank + for(Hypothesis h : list_of_hypothesis){ + if (h.getUri().equals(x.getUri())){ + Hypothesis new_h = new Hypothesis(h.getVariable(), s, h.getType(), 1); + new_list.add(new_h); } + else{ + Hypothesis new_h = new Hypothesis(h.getVariable(), h.getUri(), h.getType(), h.getRank()); + new_list.add(new_h); + } } final_list_set_hypothesis.add(new_list); } @@ -165,10 +173,11 @@ } + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% for(ArrayList<Hypothesis> x : final_list_set_hypothesis){ for(Hypothesis h : x){ - if(h.getType().contains("PROPERTY")){ + if(h.getType().contains("PROPERTY") || h.getType().contains("UNSPEC")){ ArrayList<String> result= new ArrayList<String>(); try { result = utils_new.searchIndex(h.getUri(), 1, myindex); @@ -178,6 +187,8 @@ } else{ + String tmp = "http://dbpedia.org/ontology/"+h.getUri().toLowerCase(); + h.setUri(tmp); h.setRank(0); } } catch (SQLException e) { @@ -209,9 +220,9 @@ resultArrayList.add(template_reverse_conditions); } } - for(Template temp : resultArrayList){ + /*for(Template temp : resultArrayList){ temp.printAll(); - } + }*/ return resultArrayList; } } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java 2012-02-28 13:39:21 UTC (rev 3594) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java 2012-02-28 15:32:59 UTC (rev 3595) @@ -1,7 +1,17 @@ package org.dllearner.algorithm.tbsl.exploration.Sparql; -import java.net.MalformedURLException; +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStreamReader; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.regex.Matcher; +import java.util.regex.Pattern; public class testClass_new { @@ -9,22 +19,205 @@ * @param args * @throws SQLException * @throws ClassNotFoundException - * @throws MalformedURLException + * @throws IOException */ - public static void main(String[] args) throws MalformedURLException, ClassNotFoundException, SQLException { + public static void main(String[] args) throws ClassNotFoundException, SQLException, IOException { // TODO Auto-generated method stub + ArrayList<Template> temp_list_result = new ArrayList<Template>(); + 
TemplateBuilder testobject = new TemplateBuilder(); - String question = "Which books are written by Daniele Steel?"; - testobject.createTemplates(question); + //String question = "Is the wife of president Obama called Michelle?"; + //String question = "Who is the mayor of Berlin?"; + //temp_list_result=testobject.createTemplates(question); - question = "Give me all german femal chancelors"; - testobject.createTemplates(question); - question = "Who owns Aldi?"; - testobject.createTemplates(question); - question = "In which programming language is \"GIMP\" written?"; - testobject.createTemplates(question); + ArrayList<queryInformation> list_of_structs = new ArrayList<queryInformation>(); + //if you dont want to use the hints in the questions, use false + list_of_structs=generateStruct("/home/swalter/Dokumente/dbpedia-train.xml"); + System.out.println("Start Templating"); + for(queryInformation s : list_of_structs){ + System.out.println("In For Schleife"); + ArrayList<Template> temp_list = new ArrayList<Template>(); + temp_list=testobject.createTemplates(s.getQuery().replace("<[CDATA[", "").replace("]]>", "")); + for(Template t : temp_list){ + temp_list_result.add(t); + } + + } + + String result =""; + for(Template t: temp_list_result){ + //t.printAll(); + result+="###### Template ######\n"; + result+="condition: "+t.getCondition()+"\n"; + //System.out.println("hypotesen: "+hypothesen); + int anzahl = 1; + for(ArrayList<Hypothesis> x : t.getHypothesen()){ + result+="\nSet of Hypothesen"+anzahl+":\n"; + anzahl+=1; + for ( Hypothesis z : x){ + result+="%%%%%%%%%%%"+"\n"; + result+="Variable: "+z.getVariable()+"\n"; + result+="Uri: " + z.getUri()+"\n"; + result+="Type: " + z.getType()+"\n"; + result+="Rank: "+z.getRank()+"\n"; + result+="%%%%%%%%%%%"+"\n"; + } + } + result+="\n"; + result+="selectTerm: "+t.getSelectTerm()+"\n"; + result+="having: "+t.getHaving()+"\n"; + result+="filter: "+t.getFilter()+"\n"; + result+="OrderBy: "+t.getOrderBy()+"\n"; + result+="limit: "+t.getLimit()+"\n"; + result+="###### Template printed ######\n"; + } + + //System.out.println(result); + + File file = new File("/home/swalter/Dokumente/Ausgabe_temp.txt"); + BufferedWriter bw = new BufferedWriter(new FileWriter(file)); + + bw.write(result); + bw.flush(); + bw.close(); + } + + +private static ArrayList<queryInformation> generateStruct(String filename) { + System.out.println("In generate Struct"); + String XMLType=null; + + BufferedReader in = null; + + String tmp=""; + // Lies Textzeilen aus der Datei in einen Vector: + try { + in = new BufferedReader( + new InputStreamReader( + new FileInputStream(filename) ) ); + String s; + while( null != (s = in.readLine()) ) { + tmp=tmp+s; + //System.out.println(tmp); + } + } catch( FileNotFoundException ex ) { + } catch( Exception ex ) { + System.out.println( ex ); + } finally { + if( in != null ) + try { + in.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + + // System.out.println("XML read in"); + //System.out.println(tmp); + String string=tmp; + Pattern p = Pattern.compile (".*\\<question(.*)\\</question\\>.*"); + Matcher m = p.matcher (string); + + + if(string.contains("id=\"dbpedia-train\"><question")){ + string=string.replace("id=\"dbpedia-train\"><question", ""); + XMLType="dbpedia-train"; + System.out.println("dbpedia-train"); + } + if(string.contains("id=\"dbpedia-test\"><question")){ + string=string.replace("id=\"dbpedia-test\"><question", ""); + XMLType="dbpedia-test"; + System.out.println("dbpedia-test"); + } + ArrayList<queryInformation> 
querylist = new ArrayList<queryInformation>(); + if(string.contains("</question><question")){ + System.out.println("true"); + } + else System.out.println("false"); + String [] bla = string.split("</question><question"); + System.out.println(bla.length); + for(String s : bla){ + System.out.println("in bla"); + String query=""; + String type=""; + boolean fusion=false; + boolean aggregation=false; + boolean yago=false; + String id=""; + + //Pattern p1= Pattern.compile("(id.*)\\</string\\>\\<keywords\\>.*\\</keywords\\>\\<query\\>.*"); + Pattern p1= Pattern.compile("(id.*)\\</string\\>\\<keywords\\>.*"); + Matcher m1 = p1.matcher(s); + //System.out.println(""); + while(m1.find()){ + //System.out.println(m1.group(1)); + Pattern p2= Pattern.compile(".*><string>(.*)"); + Matcher m2 = p2.matcher(m1.group(1)); + while(m2.find()){ + System.out.println("Query: "+ m2.group(1)); + query=m2.group(1).replace("<[CDATA[", ""); + query=query.replace("CDATA", ""); + query=query.replace("CDATA", ""); + query=query.replace("[", ""); + query=query.replace("<", ""); + } + Pattern p3= Pattern.compile("id=\"(.*)\" answer.*"); + Matcher m3 = p3.matcher(m1.group(1)); + while(m3.find()){ + //System.out.println("Id: "+ m3.group(1)); + id=m3.group(1); + } + + Pattern p4= Pattern.compile(".*answertype=\"(.*)\" fusion.*"); + Matcher m4 = p4.matcher(m1.group(1)); + while(m4.find()){ + //System.out.println("answertype: "+ m4.group(1)); + type=m4.group(1); + } + + Pattern p5= Pattern.compile(".*fusion=\"(.*)\" aggregation.*"); + Matcher m5 = p5.matcher(m1.group(1)); + while(m5.find()){ + //System.out.println("fusion: "+ m5.group(1)); + if(m5.group(1).contains("true"))fusion=true; + else fusion=false; + } + + Pattern p6= Pattern.compile(".*aggregation=\"(.*)\" yago.*"); + Matcher m6 = p6.matcher(m1.group(1)); + while(m6.find()){ + //System.out.println("aggregation: "+ m6.group(1)); + if(m6.group(1).contains("true"))aggregation=true; + else aggregation=false; + } + + Pattern p7= Pattern.compile(".*yago=\"(.*)\" ><string>.*"); + Matcher m7 = p7.matcher(m1.group(1)); + while(m7.find()){ + //System.out.println("yago: "+ m7.group(1)); + if(m7.group(1).contains("true"))yago=true; + else yago=false; + } + + + + } + queryInformation blaquery=new queryInformation(query, id,type,fusion,aggregation,yago,XMLType,false); + if(id!=""&&id!=null) querylist.add(blaquery); + } + /* for(queryInformation s : querylist){ + System.out.println(""); + if(s.getId()==""||s.getId()==null)System.out.println("NO"); + System.out.println("ID: "+s.getId()); + System.out.println("Query: "+s.getQuery()); + System.out.println("Type: "+s.getType()); + System.out.println("XMLType: "+s.getXMLtype()); + }*/ + return querylist; } +} \ No newline at end of file Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java 2012-02-28 13:39:21 UTC (rev 3594) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java 2012-02-28 15:32:59 UTC (rev 3595) @@ -35,6 +35,7 @@ tmp2=myindex.getYagoURI(string.toLowerCase()); if(tmp1!=null) result_List.add(tmp1); if(tmp2!=null) result_List.add(tmp2); + //result_List.add("www.TEST.de"); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
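The hypothesis-generation bug r3595 fixes is a plain Java aliasing mistake: new_list = list_of_hypothesis copied a reference rather than the list, so every "set of hypotheses" was one and the same object, and each setUri/setRank overwrote the previously generated set. A minimal self-contained illustration of the trap and the fix (all names illustrative):

import java.util.ArrayList;
import java.util.List;

public class AliasingDemo {
    public static void main(String[] args) {
        List<StringBuilder> original = new ArrayList<StringBuilder>();
        original.add(new StringBuilder("hypothesis"));

        // Buggy pattern (pre-fix): copies the reference only.
        List<StringBuilder> alias = original;
        alias.get(0).append("-mutated");
        System.out.println(original.get(0)); // "hypothesis-mutated": the original changed too

        // Fixed pattern: fresh list, fresh elements, just as the commit now builds
        // a new Hypothesis(variable, uri, type, rank) per entry.
        List<StringBuilder> copy = new ArrayList<StringBuilder>();
        for (StringBuilder sb : original) {
            copy.add(new StringBuilder(sb.toString()));
        }
        copy.get(0).append("-safe");
        System.out.println(original.get(0)); // still "hypothesis-mutated": no leak this time
    }
}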
From: <lor...@us...> - 2012-02-28 13:39:32
Revision: 3594 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3594&view=rev Author: lorenz_b Date: 2012-02-28 13:39:21 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Fixed bug. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-28 13:07:43 UTC (rev 3593) +++ trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-28 13:39:21 UTC (rev 3594) @@ -58,8 +58,6 @@ for(AxiomType<? extends OWLAxiom> type : AxiomType.AXIOM_TYPES){ axiomType2AxiomsMap.put(type, new ArrayList<OWLAxiom>(ontology.getAxioms(type))); } - //omit annotation axioms here - axiomType2AxiomsMap.remove(AxiomType.ANNOTATION_ASSERTION); logger.info("Source ontology contains " + ontology.getLogicalAxiomCount() + " logical axioms."); double[] stepSizeArray = new double[axiomType2CountMap.entrySet().size()]; @@ -195,6 +193,7 @@ } OWLOntologyManager man = OWLManager.createOWLOntologyManager(); OWLOntology schema = man.loadOntologyFromOntologyDocument(is); + man.removeAxioms(schema, schema.getAxioms(AxiomType.ANNOTATION_ASSERTION)); System.out.println("...done."); GreedyCohaerencyExtractor ge = new GreedyCohaerencyExtractor(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
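The r3594 fix relocates the annotation handling: rather than dropping AxiomType.ANNOTATION_ASSERTION from the extractor's internal type map, the annotation assertions are now stripped from the ontology right after loading. The idiom in isolation (loading from args[0] is illustrative):

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class StripAnnotationsSketch {
    public static void main(String[] args) throws Exception {
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        OWLOntology schema = man.loadOntologyFromOntologyDocument(new File(args[0]));
        // Remove every annotation assertion up front; logical axioms are untouched.
        man.removeAxioms(schema, schema.getAxioms(AxiomType.ANNOTATION_ASSERTION));
        System.out.println(schema.getLogicalAxiomCount() + " logical axioms remain.");
    }
}

Doing this at load time keeps getAxiomTypeCount() and the per-type axiom lists consistent with what is actually in the ontology.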
From: <lor...@us...> - 2012-02-28 13:07:54
Revision: 3593 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3593&view=rev Author: lorenz_b Date: 2012-02-28 13:07:43 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Updated algorithm. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-28 02:25:27 UTC (rev 3592) +++ trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-28 13:07:43 UTC (rev 3593) @@ -1,9 +1,12 @@ package org.dllearner.utilities; +import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; +import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -14,6 +17,12 @@ import org.apache.commons.collections15.BidiMap; import org.apache.commons.collections15.bidimap.DualHashBidiMap; +import org.apache.commons.compress.compressors.CompressorStreamFactory; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.FileAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.SimpleLayout; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; @@ -28,31 +37,37 @@ public class GreedyCohaerencyExtractor { - private static final double STEP_SIZE = 0.001; + private static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(JustificationBasedCoherentOntologyExtractor.class); + + private double stepSize = 0.001; private static final int ALLOWED_UNSATISFIABLE_CLASSES = 5; private OWLOntologyManager manager; - private OWLOntology cohaerentOntology; + private OWLOntology coherentOntology; private IncrementalClassifier reasoner; public GreedyCohaerencyExtractor() { // TODO Auto-generated constructor stub } - public OWLOntology getCoharentOntology(OWLOntology ontology) throws OWLOntologyCreationException{ + public OWLOntology getCoherentOntology(OWLOntology ontology, String target, double stepSize) throws OWLOntologyCreationException{ + stepSize = stepSize/100; BidiMap<AxiomType<? extends OWLAxiom>, Integer> axiomType2CountMap = getAxiomTypeCount(ontology); Map<AxiomType<? extends OWLAxiom>, List<OWLAxiom>> axiomType2AxiomsMap = new HashMap<AxiomType<? extends OWLAxiom>, List<OWLAxiom>>(); for(AxiomType<? extends OWLAxiom> type : AxiomType.AXIOM_TYPES){ axiomType2AxiomsMap.put(type, new ArrayList<OWLAxiom>(ontology.getAxioms(type))); } - System.out.println(ontology.getLogicalAxiomCount()); - double[] stepSize = new double[axiomType2CountMap.entrySet().size()]; + //omit annotation axioms here + axiomType2AxiomsMap.remove(AxiomType.ANNOTATION_ASSERTION); + + logger.info("Source ontology contains " + ontology.getLogicalAxiomCount() + " logical axioms."); + double[] stepSizeArray = new double[axiomType2CountMap.entrySet().size()]; double[] cnt = new double[axiomType2CountMap.entrySet().size()]; AxiomType[] type = new AxiomType[axiomType2CountMap.entrySet().size()]; int i=0; for(Entry<AxiomType<? 
extends OWLAxiom>, Integer> entry : axiomType2CountMap.entrySet()){ - stepSize[i] = STEP_SIZE * entry.getValue(); + stepSizeArray[i] = stepSize * entry.getValue(); type[i] = entry.getKey(); cnt[i] = 0; i++; @@ -60,71 +75,67 @@ manager = OWLManager.createOWLOntologyManager(); - cohaerentOntology = manager.createOntology(); + coherentOntology = manager.createOntology(); - reasoner = new IncrementalClassifier(cohaerentOntology); - manager.addOntologyChangeListener(reasoner); + reasoner = new IncrementalClassifier(coherentOntology); + reasoner.setMultiThreaded(false); +// manager.addOntologyChangeListener(reasoner); reasoner.classify(); - boolean isCohaerent = true; - for(double j = 0; j < 1; j += STEP_SIZE){System.out.println(j); - if(isCohaerent){ - for(i = 0; i < stepSize.length; i++){ - cnt[i] = cnt[i] + stepSize[i]; + boolean isCoherent = true; + for(double j = 0; j < 1; j += stepSize){//increase by stepsize p until 100% + if(isCoherent){ + for(i = 0; i < stepSizeArray.length; i++){//for each axiomtype + cnt[i] = cnt[i] + stepSizeArray[i];//sum up value which was computed by p * #axioms int x = (int)cnt[i]; - System.out.println("Adding " + x + " " + type[i] + " axioms from " + axiomType2CountMap.get(type[i])); -// System.out.println(axiomType2AxiomsMap.get(type[i]).size()); -// for(int k = 0; k < x; k++){ -// OWLAxiom ax = axiomType2AxiomsMap.get(type[i]).remove(0); -// man.addAxiom(cohaerentOntology, ax); -// isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().isEmpty(); -// if(!isCohaerent){ -// man.removeAxiom(cohaerentOntology, ax); + if(x > 0){ + logger.info("Adding " + x + " " + type[i] + " axioms from " + axiomType2CountMap.get(type[i])); + Set<OWLAxiom> toAdd = new HashSet<OWLAxiom>(axiomType2AxiomsMap.get(type[i]).subList(0, x)); + manager.addAxioms(coherentOntology, toAdd); + axiomType2AxiomsMap.get(type[i]).removeAll(toAdd); + isCoherent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; + if(!isCoherent){ + manager.removeAxioms(coherentOntology, toAdd); + logger.info("Incoherency detected. 
Undoing changes."); + isCoherent = true; // break; -// } -// } + } + } - /*Set<OWLAxiom> toAdd = new HashSet<OWLAxiom>(axiomType2AxiomsMap.get(type[i]).subList(0, x)); - manager.addAxioms(cohaerentOntology, toAdd); - axiomType2AxiomsMap.get(type[i]).removeAll(toAdd); - isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; - if(!isCohaerent){ - manager.removeAxioms(cohaerentOntology, toAdd);System.out.println("Incohaerency detected"); - break; - }*/ - - List<OWLAxiom> toAdd = axiomType2AxiomsMap.get(type[i]).subList(0, x); + //same procedure with divide and conquer optimization + /*List<OWLAxiom> toAdd = axiomType2AxiomsMap.get(type[i]).subList(0, x); addAxioms(toAdd); axiomType2AxiomsMap.get(type[i]).removeAll(toAdd); + */ cnt[i] = cnt[i] - x; } } - System.out.println(cohaerentOntology.getLogicalAxiomCount()); + logger.info("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } try { - manager.saveOntology(cohaerentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(new File("coherent.owl")))); + manager.saveOntology(coherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(new File(target)))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } - return cohaerentOntology; + return coherentOntology; } private Set<OWLAxiom> addAxioms(List<OWLAxiom> axioms){ Set<OWLAxiom> addedAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> axiomSet = new HashSet<OWLAxiom>(axioms); - manager.addAxioms(cohaerentOntology, axiomSet); + manager.addAxioms(coherentOntology, axiomSet); reasoner.classify(); boolean isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; if(!isCohaerent){ System.out.println("Incohaerency detected. Splitting..."); - manager.removeAxioms(cohaerentOntology, axiomSet); + manager.removeAxioms(coherentOntology, axiomSet); if(axioms.size() == 1){ return addedAxioms; } @@ -145,8 +156,8 @@ return addedAxioms; } - public OWLOntology getCoharentOntology(OWLReasoner reasoner) throws OWLOntologyCreationException{ - return getCoharentOntology(reasoner.getRootOntology()); + public OWLOntology getCoherentOntology(OWLReasoner reasoner, String target, double stepSize) throws OWLOntologyCreationException{ + return getCoherentOntology(reasoner.getRootOntology(), target, stepSize); } private BidiMap<AxiomType<? 
extends OWLAxiom>, Integer> getAxiomTypeCount(OWLOntology ontology){ @@ -164,11 +175,30 @@ } public static void main(String[] args) throws Exception{ + Logger.getRootLogger().setLevel(Level.INFO); + Logger.getRootLogger().removeAllAppenders(); + Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout())); + Logger.getRootLogger().addAppender(new FileAppender(new SimpleLayout(), "log/greedy_out.log")); + + if(args.length != 3){ + System.out.println("USAGE: GreedyCoherencyExtractor <incoherent.owl> <target.owl> <stepsizeInPercent>"); + System.exit(0); + } + String filename = args[0]; + String target = args[1]; + double stepSize = Double.parseDouble(args[2]); + + System.out.println("Loading ontology..."); + InputStream is = new BufferedInputStream(new FileInputStream(filename)); + if(args[0].endsWith("bz2")){ + is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); + } OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/papers/ESWC2012/dbpedia_0.75_no_datapropaxioms.owl")); + OWLOntology schema = man.loadOntologyFromOntologyDocument(is); + System.out.println("...done."); GreedyCohaerencyExtractor ge = new GreedyCohaerencyExtractor(); - ge.getCoharentOntology(schema); + ge.getCoherentOntology(schema, target, stepSize); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
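The greedy loop in this revision distils to a simple invariant: as a global fraction p grows from 0 to 100%, each axiom type contributes p times its total axiom count, with the fractional remainder carried over between iterations, and any batch that pushes the ontology past the tolerated number of unsatisfiable classes is undone. A minimal sketch of that loop, with hypothetical names (pool, CoherenceCheck) standing in for the Pellet IncrementalClassifier wiring of the real class:

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

class GreedyFillSketch {
    // Hides the reasoner call that tolerates up to ALLOWED_UNSATISFIABLE_CLASSES.
    interface CoherenceCheck { boolean isAcceptablyCoherent(); }

    static void fill(OWLOntologyManager manager, OWLOntology coherentOntology,
            AxiomType<?>[] types, Map<AxiomType<?>, List<OWLAxiom>> pool,
            double stepSize, CoherenceCheck check) {
        double[] carry = new double[types.length];
        int[] total = new int[types.length];
        for (int i = 0; i < types.length; i++) {
            total[i] = pool.get(types[i]).size();
        }
        for (double p = 0; p < 1; p += stepSize) {  // grow towards 100% of the source
            for (int i = 0; i < types.length; i++) {
                carry[i] += stepSize * total[i];    // proportional share for this type
                int x = (int) carry[i];             // whole axioms to try this round
                if (x > 0) {
                    Set<OWLAxiom> batch =
                        new HashSet<OWLAxiom>(pool.get(types[i]).subList(0, x));
                    manager.addAxioms(coherentOntology, batch);
                    pool.get(types[i]).removeAll(batch);
                    if (!check.isAcceptablyCoherent()) {
                        // too many unsatisfiable classes: undo and drop the batch
                        manager.removeAxioms(coherentOntology, batch);
                    }
                }
                carry[i] -= x;  // keep the fractional remainder for the next round
            }
        }
    }
}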
From: <lor...@us...> - 2012-02-28 02:25:33
Revision: 3592 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3592&view=rev Author: lorenz_b Date: 2012-02-28 02:25:27 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Small change in execution. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-02-27 16:02:32 UTC (rev 3591) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-02-28 02:25:27 UTC (rev 3592) @@ -196,8 +196,6 @@ } else { evalAxiom = new EvaluatedAxiom(new DisjointObjectPropertyAxiom(propertyToDescribe, p), new AxiomScore(1)); } - evalAxiom = new EvaluatedAxiom(new DisjointObjectPropertyAxiom(propertyToDescribe, p), - new AxiomScore(1)); axioms.add(evalAxiom); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
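The removed statement is easy to miss in the diff: the if/else above it already builds evalAxiom, with a computed AxiomScore in one branch and AxiomScore(1) in the other, and the deleted line then rebuilt it unconditionally with AxiomScore(1), discarding the computed score. Schematically (scoreComputable and computedScore are simplified stand-ins for the surrounding logic, not the literal source):

// Schematic view of r3592:
EvaluatedAxiom evalAxiom = scoreComputable
        ? new EvaluatedAxiom(disjointAxiom, computedScore)      // kept
        : new EvaluatedAxiom(disjointAxiom, new AxiomScore(1)); // kept
// evalAxiom = new EvaluatedAxiom(disjointAxiom, new AxiomScore(1)); // removed:
// this unconditional rebuild gave every candidate confidence 1
axioms.add(evalAxiom);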
From: <lor...@us...> - 2012-02-27 16:02:38
Revision: 3591 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3591&view=rev Author: lorenz_b Date: 2012-02-27 16:02:32 +0000 (Mon, 27 Feb 2012) Log Message: ----------- Continued. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-27 13:00:31 UTC (rev 3590) +++ trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2012-02-27 16:02:32 UTC (rev 3591) @@ -31,6 +31,10 @@ private static final double STEP_SIZE = 0.001; private static final int ALLOWED_UNSATISFIABLE_CLASSES = 5; + private OWLOntologyManager manager; + private OWLOntology cohaerentOntology; + private IncrementalClassifier reasoner; + public GreedyCohaerencyExtractor() { // TODO Auto-generated constructor stub } @@ -55,11 +59,11 @@ } - OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - OWLOntology cohaerentOntology = man.createOntology(); + manager = OWLManager.createOWLOntologyManager(); + cohaerentOntology = manager.createOntology(); - IncrementalClassifier reasoner = new IncrementalClassifier(cohaerentOntology); - man.addOntologyChangeListener(reasoner); + reasoner = new IncrementalClassifier(cohaerentOntology); + manager.addOntologyChangeListener(reasoner); reasoner.classify(); @@ -80,14 +84,20 @@ // break; // } // } - Set<OWLAxiom> toAdd = new HashSet<OWLAxiom>(axiomType2AxiomsMap.get(type[i]).subList(0, x)); - man.addAxioms(cohaerentOntology, toAdd); + + /*Set<OWLAxiom> toAdd = new HashSet<OWLAxiom>(axiomType2AxiomsMap.get(type[i]).subList(0, x)); + manager.addAxioms(cohaerentOntology, toAdd); axiomType2AxiomsMap.get(type[i]).removeAll(toAdd); isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; if(!isCohaerent){ - man.removeAxioms(cohaerentOntology, toAdd);System.out.println("Incohaerency detected"); + manager.removeAxioms(cohaerentOntology, toAdd);System.out.println("Incohaerency detected"); break; - } + }*/ + + List<OWLAxiom> toAdd = axiomType2AxiomsMap.get(type[i]).subList(0, x); + addAxioms(toAdd); + axiomType2AxiomsMap.get(type[i]).removeAll(toAdd); + cnt[i] = cnt[i] - x; } } @@ -95,7 +105,7 @@ } try { - man.saveOntology(cohaerentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(new File("coherent.owl")))); + manager.saveOntology(cohaerentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(new File("coherent.owl")))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { @@ -105,6 +115,36 @@ return cohaerentOntology; } + private Set<OWLAxiom> addAxioms(List<OWLAxiom> axioms){ + Set<OWLAxiom> addedAxioms = new HashSet<OWLAxiom>(); + + Set<OWLAxiom> axiomSet = new HashSet<OWLAxiom>(axioms); + manager.addAxioms(cohaerentOntology, axiomSet); + reasoner.classify(); + boolean isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; + if(!isCohaerent){ + System.out.println("Incohaerency detected. 
Splitting..."); + manager.removeAxioms(cohaerentOntology, axiomSet); + if(axioms.size() == 1){ + return addedAxioms; + } + + int size = axioms.size(); + int pivot = size/2; + + List<OWLAxiom> left = axioms.subList(0, pivot); + List<OWLAxiom> right = axioms.subList(pivot, size-1); + + addedAxioms.addAll(addAxioms(left)); + addedAxioms.addAll(addAxioms(right)); + + } else { + addedAxioms.addAll(axioms); + } + + return addedAxioms; + } + public OWLOntology getCoharentOntology(OWLReasoner reasoner) throws OWLOntologyCreationException{ return getCoharentOntology(reasoner.getRootOntology()); } @@ -125,7 +165,7 @@ public static void main(String[] args) throws Exception{ OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); + OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/papers/ESWC2012/dbpedia_0.75_no_datapropaxioms.owl")); GreedyCohaerencyExtractor ge = new GreedyCohaerencyExtractor(); ge.getCoharentOntology(schema); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
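A detail in the new addAxioms worth spelling out: List.subList(from, to) has an exclusive upper bound, so splitting into subList(0, pivot) and subList(pivot, size) covers the whole batch, while the committed subList(pivot, size-1) silently drops the last axiom of every failing batch. A sketch of the divide-and-conquer step with the full range, again behind a hypothetical coherence check in place of the IncrementalClassifier call:

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

class SplitSketch {
    interface CoherenceCheck { boolean isAcceptablyCoherent(); }

    // Adds the batch; on incoherence it is undone and retried in two halves.
    static Set<OWLAxiom> addAxioms(OWLOntologyManager m, OWLOntology ont,
            List<OWLAxiom> axioms, CoherenceCheck check) {
        Set<OWLAxiom> added = new HashSet<OWLAxiom>();
        Set<OWLAxiom> batch = new HashSet<OWLAxiom>(axioms);
        m.addAxioms(ont, batch);
        if (check.isAcceptablyCoherent()) {
            added.addAll(axioms);
            return added;
        }
        m.removeAxioms(ont, batch);
        if (axioms.size() == 1) {
            return added; // a single offending axiom is rejected outright
        }
        int pivot = axioms.size() / 2;
        // subList's end index is exclusive: [0,pivot) and [pivot,size) partition the list
        added.addAll(addAxioms(m, ont, axioms.subList(0, pivot), check));
        added.addAll(addAxioms(m, ont, axioms.subList(pivot, axioms.size()), check));
        return added;
    }
}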
From: <lor...@us...> - 2012-02-27 13:00:41
Revision: 3590 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3590&view=rev Author: lorenz_b Date: 2012-02-27 13:00:31 +0000 (Mon, 27 Feb 2012) Log Message: ----------- Integrated parallel computation of explanations. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-26 10:02:29 UTC (rev 3589) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-27 13:00:31 UTC (rev 3590) @@ -12,15 +12,19 @@ import java.net.URL; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; -import java.util.Map.Entry; -import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; @@ -47,7 +51,6 @@ import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.RemoveAxiom; -import org.semanticweb.owlapi.owllink.builtin.requests.LoadOntologies; import org.semanticweb.owlapi.reasoner.IllegalConfigurationException; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration; @@ -61,7 +64,7 @@ import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; import com.clarkparsia.owlapiv3.OntologyUtils; -import com.clarkparsia.pellet.owlapiv3.PelletReasoner; +import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -82,6 +85,7 @@ private Map<OWLEntity, OWLOntology> entity2ModuleMap = new HashMap<OWLEntity, OWLOntology>(); private Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); + private Map<OWLEntity, PelletExplanation> entity2ExpGen = new HashMap<OWLEntity, PelletExplanation>(); private OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); MessageDigest md5; @@ -177,7 +181,7 @@ //compute the unsatisfiable classes logger.info("Computing root/derived unsatisfiable classes..."); long startTime = System.currentTimeMillis(); - StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner); + StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner, this); Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses(); Set<OWLClass> derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); @@ -212,7 +216,7 @@ //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - 
entity2Explanations.putAll(getInitialExplanations(unsatClasses)); + computeExplanations(unsatClasses); if(computeParallel){ entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); } @@ -274,9 +278,9 @@ //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); - refillExplanations(unsatClasses, entity2Explanations); + computeExplanations(unsatClasses); if(computeParallel){ - refillExplanations(unsatObjectProperties, entity2Explanations); + computeExplanations(unsatObjectProperties); } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -290,6 +294,11 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); + + logger.info("Recomputing explanations..."); + startTime = System.currentTimeMillis(); + computeExplanations(unsatObjectProperties); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); while(!unsatObjectProperties.isEmpty()){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -313,7 +322,7 @@ //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); - refillExplanations(unsatObjectProperties, entity2Explanations); + computeExplanations(unsatObjectProperties); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); @@ -400,6 +409,74 @@ return getOntologyWithAnnotations(incoherentOntology); } + private OWLOntology computeCoherentOntology2(OWLOntology ontology) { + //compute the unsatisfiable classes + logger.info("Computing unsatisfiable classes..."); + long startTime = System.currentTimeMillis(); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + int cnt = unsatClasses.size(); + logger.info("Detected " + cnt + " unsatisfiable classes."); + + //if the ontology is not incoherent we return it here + if(unsatClasses.isEmpty()){ + return incoherentOntology; + } + + //compute initial explanations for each unsatisfiable class + logger.info("Computing initial explanations..."); + startTime = System.currentTimeMillis(); + PelletExplanation expGen = new PelletExplanation(reasoner.getReasoner()); + Set<Set<OWLAxiom>> explanations; + for(OWLClass unsatCls : unsatClasses){ + explanations = expGen.getUnsatisfiableExplanations(unsatCls, numberOfJustifications); + logger.info(unsatCls); + entity2Explanations.put(unsatCls, explanations); + } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + + while(!unsatClasses.isEmpty() && !unsatObjectProperties.isEmpty()){ + //we remove the most appropriate axiom from the ontology + removeAppropriateAxiom(); + + //recompute the unsatisfiable classes + logger.info("Reclassifying..."); + startTime = System.currentTimeMillis(); + reasoner.classify(); + unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); + + //save + if(cnt - unsatClasses.size() >= 1){ + cnt = unsatClasses.size(); + save("log/" + fileName + "_" + cnt + "cls" + ".owl"); + } + + //recompute explanations if necessary + logger.info("Recomputing explanations..."); + startTime = System.currentTimeMillis(); + for(OWLClass 
unsatCls : unsatClasses){ + if(entity2Explanations.get(unsatCls).size() < numberOfJustifications){ + explanations = expGen.getUnsatisfiableExplanations(unsatCls, numberOfJustifications); + entity2Explanations.put(unsatCls, explanations); + } + } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + + } + try { + incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + System.out.println(incoherentOntology.getLogicalAxiomCount()); + + return getOntologyWithAnnotations(incoherentOntology); + } + private void removeAppropriateAxiom(){ //get frequency for each axiom Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); @@ -496,16 +573,40 @@ return axiom2CountMap; } + private void computeExplanations(Set<? extends OWLEntity> unsatEntities){ + + ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); + List<Future<Void>> list = new ArrayList<Future<Void>>(); + + for(final OWLEntity unsatEntity : unsatEntities){ + Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatEntity); + if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ + executor.execute(new Runnable(){ + + @Override + public void run() { + Set<Set<OWLAxiom>> explanations = computeExplanations(unsatEntity); + entity2Explanations.put(unsatEntity, explanations); + } + + }); + } + } + + executor.shutdown(); + while (!executor.isTerminated()) { + + } + + } + + private Map<OWLEntity, Set<Set<OWLAxiom>>> getInitialExplanations(Set<? 
extends OWLEntity> unsatEntities){ Map<OWLEntity, Set<Set<OWLAxiom>>> cls2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); + Set<Set<OWLAxiom>> explanations; for(OWLEntity unsatEntity : unsatEntities){ - Set<Set<OWLAxiom>> explanations = null; - if(unsatEntity instanceof OWLClass){ - explanations = computeExplanations((OWLClass) unsatEntity); - } else if(unsatEntity instanceof OWLObjectProperty){ - explanations = computeExplanations((OWLObjectProperty) unsatEntity); - } + explanations = computeExplanations(unsatEntity); cls2Explanations.put(unsatEntity, explanations); } @@ -539,17 +640,12 @@ } private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity){ - PelletExplanation expGen = new PelletExplanation(getModule(unsatEntity)); - if(unsatEntity instanceof OWLClass){ - return expGen.getUnsatisfiableExplanations((OWLClass) unsatEntity, numberOfJustifications); - } else if(unsatEntity instanceof OWLObjectProperty){ - return expGen.getUnsatisfiableExplanations(factory.getOWLObjectExactCardinality(1, (OWLObjectProperty)unsatEntity), numberOfJustifications); - } - return null; + logger.info(unsatEntity); + return computeExplanations(unsatEntity, numberOfJustifications); } private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity, int limit){ - PelletExplanation expGen = new PelletExplanation(getModule(unsatEntity)); + PelletExplanation expGen = getExplanationGenerator(unsatEntity); if(unsatEntity instanceof OWLClass){ return expGen.getUnsatisfiableExplanations((OWLClass) unsatEntity, limit); } else if(unsatEntity instanceof OWLObjectProperty){ @@ -558,6 +654,14 @@ return null; } + private PelletExplanation getExplanationGenerator(OWLEntity entity){ + PelletExplanation expGen = entity2ExpGen.get(entity); + if(expGen == null){ + expGen = new PelletExplanation(PelletReasonerFactory.getInstance().createNonBufferingReasoner(getModule(entity))); + entity2ExpGen.put(entity, expGen); + } + return expGen; + } private Set<Set<OWLAxiom>> computeExplanationsBlackBox(OWLClass unsatClass, int limit){ BlackBoxExplanation singleExpGen = new BlackBoxExplanation(incoherentOntology, new HermiTReasonerFactory(), hermitReasoner); @@ -600,7 +704,7 @@ // return module; // } - private OWLOntology getModule(OWLEntity entity){ + public OWLOntology getModule(OWLEntity entity){ OWLOntology module = entity2ModuleMap.get(entity); new File("log").mkdir(); if(module == null){ Modified: trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java 2012-02-26 10:02:29 UTC (rev 3589) +++ trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java 2012-02-27 13:00:31 UTC (rev 3590) @@ -8,6 +8,7 @@ import java.util.Map; import java.util.Set; +import org.apache.commons.httpclient.methods.GetMethod; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.AddAxiom; import org.semanticweb.owlapi.model.AxiomType; @@ -76,6 +77,8 @@ private Map<OWLClass, Set<OWLClass>> child2Parents; private Map<OWLClass, Set<OWLClass>> parent2Children; + private JustificationBasedCoherentOntologyExtractor extractor; + // private Map<OWLClass, Map<OWLAxiom, Set<OWLClass>>> class2Dependency; public StructureBasedRootClassFinder(OWLReasoner reasoner){ @@ -105,6 +108,33 @@ } + public StructureBasedRootClassFinder(OWLReasoner reasoner, 
JustificationBasedCoherentOntologyExtractor extractor){ + this.extractor = extractor; + this.manager = OWLManager.createOWLOntologyManager(); + this.reasoner = reasoner; + this.reasonerFactory = new PelletReasonerFactory(); + try { + this.ontology = manager.createOntology(IRI.create("http://all"), reasoner.getRootOntology().getImportsClosure()); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } catch (OWLOntologyChangeException e) { + e.printStackTrace(); + } + rootClasses = new HashSet<OWLClass>(); + derivedClasses = new HashSet<OWLClass>(); + unsatClasses = new HashSet<OWLClass>(); + + depend2Classes = new HashSet<OWLClass>(); + depth2UniversalRestrictionPropertyMap = new HashMap<Integer, Set<OWLObjectAllValuesFrom>>(); + depth2ExistsRestrictionPropertyMap = new HashMap<Integer, Set<OWLObjectPropertyExpression>>(); + + child2Parents = new HashMap<OWLClass, Set<OWLClass>>(); + parent2Children = new HashMap<OWLClass, Set<OWLClass>>(); + +// class2Dependency = new HashMap<OWLClass, Map<OWLAxiom, Set<OWLClass>>>(); + + } + public void computeRootDerivedClasses(){ unsatClasses.clear(); rootClasses.clear(); @@ -180,8 +210,7 @@ OWLReasoner checker = null; for (OWLClass root : new ArrayList<OWLClass>(roots)) { - checker = reasonerFactory.createNonBufferingReasoner(manager.createOntology(ModularityUtils.extractModule - (ontology, root.getSignature(), ModuleType.TOP_OF_BOT))); + checker = reasonerFactory.createNonBufferingReasoner(extractor.getModule(root)); if (!potentialRoots.contains(root) && checker.isSatisfiable(root)) { rootClasses.remove(root); } @@ -195,10 +224,9 @@ } } catch (OWLOntologyChangeException e) { e.printStackTrace(); - } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } + } + + } private void reset(){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
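The new computeExplanations fans the Pellet explanation calls out over a fixed thread pool but then waits with an empty while (!executor.isTerminated()) {} loop, which spins a core until the pool drains; ExecutorService.awaitTermination blocks instead. Since the worker threads also write into the shared entity2Explanations map, a ConcurrentHashMap (or external synchronization) is needed for safe publication. A sketch along those lines, with the per-entity explanation call left abstract:

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLEntity;

abstract class ParallelExplanationSketch {
    // Replaces the plain HashMap field so pool threads can publish results safely.
    final ConcurrentMap<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations =
            new ConcurrentHashMap<OWLEntity, Set<Set<OWLAxiom>>>();

    abstract Set<Set<OWLAxiom>> computeExplanations(OWLEntity entity);

    void computeExplanations(Set<? extends OWLEntity> entities,
            final int numberOfJustifications) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(
                Runtime.getRuntime().availableProcessors());
        for (final OWLEntity entity : entities) {
            Set<Set<OWLAxiom>> precomputed = entity2Explanations.get(entity);
            if (precomputed == null || precomputed.size() < numberOfJustifications) {
                executor.execute(new Runnable() {
                    public void run() {
                        entity2Explanations.put(entity, computeExplanations(entity));
                    }
                });
            }
        }
        executor.shutdown();
        // block until the pool drains instead of spinning on isTerminated()
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
    }
}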
From: <lor...@us...> - 2012-02-26 10:02:35
Revision: 3589 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3589&view=rev Author: lorenz_b Date: 2012-02-26 10:02:29 +0000 (Sun, 26 Feb 2012) Log Message: ----------- Small changes to save ontologies with given filename. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-24 14:15:56 UTC (rev 3588) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-26 10:02:29 UTC (rev 3589) @@ -94,7 +94,8 @@ private OWLOntology dbpediaOntology; - private String fileName; + private String fileName = "dbpedia"; + private String diffFileName = "diff.owl"; public JustificationBasedCoherentOntologyExtractor() { try { @@ -117,14 +118,6 @@ this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); - IRI iri = ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology); - fileName = "dbpedia"; - if(iri != null){ - fileName = iri.toString().substring( iri.toString().lastIndexOf('/')+1, iri.toString().length() ); - } else { - - } - new File("log").mkdir(); File diffFile = new File("log/" + DIFF_ONTOLOGY_NAME); @@ -165,6 +158,11 @@ } } + public void setFileName(String fileName) { + this.fileName = fileName; + diffFileName = "diff_" + fileName; + } + private OWLOntology computeCoherentOntologyRootBased(OWLOntology ontology) { // startTime = System.currentTimeMillis(); @@ -426,7 +424,7 @@ OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); try { toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); - toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/diff.owl"))); + toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { @@ -743,6 +741,11 @@ JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); extractor.setNumberOfJustifications(numberOfJustifications); extractor.setComputeParallel(computeParallel); + if(filename.indexOf('/') >= 0){ + filename = filename.substring( filename.lastIndexOf('/')+1, filename.length() ); + } + + extractor.setFileName(filename); OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2012-02-24 14:16:05
Revision: 3588 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3588&view=rev Author: jenslehmann Date: 2012-02-24 14:15:56 +0000 (Fri, 24 Feb 2012) Log Message: ----------- Oxtractor helper class Added Paths: ----------- trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java Added: trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java (rev 0) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java 2012-02-24 14:15:56 UTC (rev 3588) @@ -0,0 +1,64 @@ +package org.dllearner.scripts; + +import java.util.Set; +import java.util.TreeSet; + +import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; + +public class OxtractorHelper { + + private String endpoint = "http://live.dbpedia.org/sparql"; + private String defaultGraph = "http://dbpedia.org"; + + public OxtractorHelper(String endpoint) { + this(endpoint,null); + } + + public OxtractorHelper(String endpoint, String defaultGraph) { + this.endpoint = endpoint; + this.defaultGraph = defaultGraph; + } + + public Set<String> getCategories(String keyword) { + String sparqlQuery = "SELECT DISTINCT ?cat { ?cat <http://purl.org/dc/terms/subject> ?subject . ?subject <http://www.w3.org/2000/01/rdf-schema#label> ?label . FILTER( bif:contains(?label, \""+keyword+"\" ) ) } LIMIT 100"; + ResultSet rs = executeSelectQuery(sparqlQuery); + QuerySolution qs; + Set<String> categories = new TreeSet<String>(); + while(rs.hasNext()){ + qs = rs.next(); + categories.add(qs.get("cat").toString()); + } + return categories; + } + + public Set<String> getInstances(String category) { + String sparqlQuery = "SELECT ?instance { ?instance <http://purl.org/dc/terms/subject> <"+category+"> }"; + ResultSet rs = executeSelectQuery(sparqlQuery); + QuerySolution qs; + Set<String> instances = new TreeSet<String>(); + while(rs.hasNext()){ + qs = rs.next(); + instances.add(qs.get("instance").toString()); + } + return instances; + } + + private ResultSet executeSelectQuery(String query) { + System.out.println("Sending query: " + query); + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint, query); + queryExecution.addDefaultGraph(defaultGraph); + return queryExecution.execSelect(); + } + + /** + * @param args + */ + public static void main(String[] args) { + OxtractorHelper oh = new OxtractorHelper("http://live.dbpedia.org/sparql","http://dbpedia.org"); + System.out.println(oh.getInstances("http://dbpedia.org/resource/Category:Cities_in_Saxony")); + System.out.println(oh.getCategories("room")); + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
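Two caveats about the queries in this helper: bif:contains is a Virtuoso-specific free-text extension, so getCategories only works against Virtuoso endpoints such as the DBpedia Live one used here, and the keyword is spliced into the query string verbatim, so a stray double quote in the input breaks the query. A hypothetical guard (not part of the committed class) that escapes the input before interpolation:

// Escapes characters that would terminate the SPARQL string literal around
// the bif:contains pattern; hypothetical helper, shown for illustration only.
private static String sanitizeKeyword(String keyword) {
    return keyword.replace("\\", "\\\\").replace("\"", "\\\"");
}

// usage inside getCategories:
// "... FILTER( bif:contains(?label, \"" + sanitizeKeyword(keyword) + "\") ) ..."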
From: <lor...@us...> - 2012-02-24 10:38:50
Revision: 3587 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3587&view=rev Author: lorenz_b Date: 2012-02-24 10:38:39 +0000 (Fri, 24 Feb 2012) Log Message: ----------- Updated TreeTagger. Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/TreeTagger.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/TreeTagger.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/TreeTagger.java 2012-02-24 10:36:50 UTC (rev 3586) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/TreeTagger.java 2012-02-24 10:38:39 UTC (rev 3587) @@ -1,6 +1,7 @@ package org.dllearner.algorithm.tbsl.nlp; import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Arrays; @@ -14,35 +15,37 @@ TreeTaggerWrapper<String> tt; + private String tagging; + public TreeTagger() throws IOException { - System.setProperty("treetagger.home","/home/christina/Software/TreeTagger"); + System.setProperty("treetagger.home","/home/lorenz/Downloads/TreeTagger"); tt = new TreeTaggerWrapper<String>(); - tt.setModel("/home/christina/Software/TreeTagger/lib/english.par:iso8859-1"); + tt.setModel(this.getClass().getClassLoader().getResource("tbsl/models/treetagger/english.par").getPath()); } public String tag(String s) { - + tagging = ""; List<String> input = Arrays.asList(s.split(" ")); try { tt.setHandler(new TokenHandler<String>() { public void token(String token, String pos, String lemma) { - System.out.println(token+"/"+pos+"/"+lemma); + tagging += token+"/"+pos + " "; } }); - System.out.println("Tagged with TreeTagger:\n"); tt.process(input); - System.out.println(tt.getStatus()); } catch (IOException e) { e.printStackTrace(); } catch (TreeTaggerException e) { e.printStackTrace(); } - finally { - tt.destroy(); - } - return ""; + + return tagging.trim(); } + public void close(){ + tt.destroy(); + } + @Override public String getName() { return "Tree Tagger"; @@ -56,7 +59,6 @@ @Override public List<String> tagTopK(String sentence) { - // TODO Auto-generated method stub - return null; + return Collections.singletonList(tag(sentence)); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
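After this change the wrapper is usable as an ordinary POS tagger: tag() collects "token/POS" pairs into a string instead of printing them, and the TreeTagger process is no longer destroyed after every sentence but only in the new close(). The hardcoded treetagger.home path and the bundled english.par model are local assumptions of the class itself; given those, usage would look roughly like this:

// Usage sketch; requires a local TreeTagger installation at treetagger.home
// and the english.par model resolvable on the classpath.
TreeTagger tagger = new TreeTagger();  // throws IOException if setup fails
try {
    String tagged = tagger.tag("Birds sing loudly");
    System.out.println(tagged);        // e.g. "Birds/NNS sing/VVP loudly/RB" (illustrative)
} finally {
    tagger.close();                    // destroys the wrapped native process
}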
From: <lor...@us...> - 2012-02-24 10:37:01
Revision: 3586 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3586&view=rev Author: lorenz_b Date: 2012-02-24 10:36:50 +0000 (Fri, 24 Feb 2012) Log Message: ----------- Small changes for filename. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-24 08:59:06 UTC (rev 3585) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-24 10:36:50 UTC (rev 3586) @@ -94,6 +94,8 @@ private OWLOntology dbpediaOntology; + private String fileName; + public JustificationBasedCoherentOntologyExtractor() { try { md5 = MessageDigest.getInstance("MD5"); @@ -115,6 +117,14 @@ this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); + IRI iri = ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology); + fileName = "dbpedia"; + if(iri != null){ + fileName = iri.toString().substring( iri.toString().lastIndexOf('/')+1, iri.toString().length() ); + } else { + + } + new File("log").mkdir(); File diffFile = new File("log/" + DIFF_ONTOLOGY_NAME); @@ -176,6 +186,12 @@ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); int rootCnt = unsatClasses.size(); int derivedCnt = derivedUnsatClasses.size(); + + //if no roots are found we use all unsat classes + if(rootCnt == 0){ + unsatClasses = derivedUnsatClasses; + } + // Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); int cnt = rootCnt + derivedCnt; int unsatPropCnt = unsatObjectProperties.size(); @@ -229,6 +245,11 @@ derivedCnt = derivedUnsatClasses.size(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + //if no roots are found we use all unsat classes + if(rootCnt == 0){ + unsatClasses = derivedUnsatClasses; + } + logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); if(unsatClasses.isEmpty()){ @@ -244,7 +265,7 @@ //save if(cnt - (rootCnt+derivedCnt) >= 1 || (unsatPropCnt - unsatObjectProperties.size()) >= 1){ cnt = rootCnt + derivedCnt; - save("log/dbpedia_" + cnt + "cls" + unsatPropCnt + "prop.owl"); + save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); cnt = rootCnt + derivedCnt; unsatPropCnt = unsatObjectProperties.size(); if(computeParallel){ @@ -287,7 +308,7 @@ //save if((unsatPropCnt - unsatObjectProperties.size()) >= 1){ - save("log/dbpedia_" + cnt + "cls" + unsatPropCnt + "prop.owl"); + save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); unsatPropCnt = unsatObjectProperties.size(); } @@ -338,7 +359,7 @@ entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(!unsatClasses.isEmpty()){ + while(!unsatClasses.isEmpty() && !unsatObjectProperties.isEmpty()){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -357,7 +378,7 @@ //save if(cnt - unsatClasses.size() >= 10){ cnt = unsatClasses.size(); - save("log/dbpedia_" + cnt + ".owl"); + save("log/" + fileName + "_" + cnt + "cls" + unsatObjectProperties.size() + "prop.owl"); } //recompute 
explanations if necessary This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-02-24 08:59:17
Revision: 3585 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3585&view=rev Author: lorenz_b Date: 2012-02-24 08:59:06 +0000 (Fri, 24 Feb 2012) Log Message: ----------- Updated SOLR dependency. Modified Paths: -------------- trunk/components-ext/pom.xml trunk/pom.xml Modified: trunk/components-ext/pom.xml =================================================================== --- trunk/components-ext/pom.xml 2012-02-23 15:16:14 UTC (rev 3584) +++ trunk/components-ext/pom.xml 2012-02-24 08:59:06 UTC (rev 3585) @@ -164,7 +164,7 @@ <dependency> <groupId>org.annolab.tt4j</groupId> <artifactId>org.annolab.tt4j</artifactId> - <version>1.0.14</version> + <version>1.0.16</version> </dependency> <dependency> <groupId>org.ini4j</groupId> Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2012-02-23 15:16:14 UTC (rev 3584) +++ trunk/pom.xml 2012-02-24 08:59:06 UTC (rev 3585) @@ -127,7 +127,7 @@ <dependency> <groupId>org.apache.solr</groupId> <artifactId>solr-core</artifactId> - <version>3.3.0</version> + <version>3.5.0</version> <exclusions> <exclusion> <groupId>commons-logging</groupId> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2012-02-23 15:16:25
Revision: 3584 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3584&view=rev Author: sebastianwtr Date: 2012-02-23 15:16:14 +0000 (Thu, 23 Feb 2012) Log Message: ----------- [tbsl exploration] now generates templates with a set of hypothesis Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java Added Paths: ----------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java 2012-02-22 21:03:09 UTC (rev 3583) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Hypothesis.java 2012-02-23 15:16:14 UTC (rev 3584) @@ -1,9 +1,17 @@ package org.dllearner.algorithm.tbsl.exploration.Sparql; +import edu.stanford.nlp.io.EncodingPrintWriter.out; + public class Hypothesis { private String variable; private String uri; private float rank; + +/** + * RESOURCE,PROPERTY,UNSPEC + */ +private String type; + public String getUri() { return uri; } @@ -23,9 +31,27 @@ this.rank = rank; } -public Hypothesis(String variable, String uri, float rank){ +public Hypothesis(String variable, String uri, String type, float rank){ setRank(rank); setVariable(variable); setUri(uri); + setType(type); } + +public String getType() { + return type; } +public void setType(String type) { + this.type = type; +} + +public void printAll(){ + System.out.println("%%%%%%%%%%%"); + System.out.println("Variable: "+variable); + System.out.println("Uri: " + uri); + System.out.println("Type: " + type); + System.out.println("Rank: "+rank); + System.out.println("%%%%%%%%%%%"); +} + +} Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java 2012-02-22 21:03:09 UTC (rev 3583) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java 2012-02-23 15:16:14 UTC (rev 3584) @@ -5,7 +5,7 @@ public class Template { private ArrayList<ArrayList<String>> condition = new ArrayList<ArrayList<String>>(); - private ArrayList<ArrayList<Hypothesis>> hypotesen = new ArrayList<ArrayList<Hypothesis>>(); + private ArrayList<ArrayList<Hypothesis>> hypothesen = new ArrayList<ArrayList<Hypothesis>>(); private String selectTerm; private String having; private String filter; @@ -59,22 +59,30 @@ public void setCondition(ArrayList<ArrayList<String>> condition) { this.condition = condition; } - public ArrayList<ArrayList<Hypothesis>> getHypotesen() { - return hypotesen; + public ArrayList<ArrayList<Hypothesis>> getHypothesen() { + return hypothesen; } - public void setHypotesen(ArrayList<ArrayList<Hypothesis>> hypotesen) { - this.hypotesen = hypotesen; + public void setHypothesen(ArrayList<ArrayList<Hypothesis>> hypotesen) { + this.hypothesen = hypotesen; } public void 
addHypothese(ArrayList<Hypothesis> ht){ - this.hypotesen.add(ht); + this.hypothesen.add(ht); } public void printAll(){ System.out.println("###### Template ######"); System.out.println("condition: "+condition); - System.out.println("hypotesen: "+hypotesen); + //System.out.println("hypotesen: "+hypothesen); + int anzahl = 1; + for(ArrayList<Hypothesis> x : hypothesen){ + System.out.println("\nSet of Hypothesen"+anzahl+":"); + for ( Hypothesis z : x){ + z.printAll(); + } + } + System.out.print("\n"); System.out.println("selectTerm: "+selectTerm); System.out.println("having: "+having); System.out.println("filter: "+filter); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java 2012-02-22 21:03:09 UTC (rev 3583) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java 2012-02-23 15:16:14 UTC (rev 3584) @@ -10,18 +10,20 @@ import org.dllearner.algorithm.tbsl.sparql.SPARQL_Filter; import org.dllearner.algorithm.tbsl.sparql.SPARQL_Having; import org.dllearner.algorithm.tbsl.sparql.SPARQL_Term; +import org.dllearner.algorithm.tbsl.sparql.Slot; import org.dllearner.algorithm.tbsl.templator.BasicTemplator; public class TemplateBuilder { - static BasicTemplator btemplator; +static BasicTemplator btemplator; +private static mySQLDictionary myindex; public TemplateBuilder() throws MalformedURLException, ClassNotFoundException, SQLException{ TemplateBuilder.btemplator = new BasicTemplator(); //btemplator.UNTAGGED_INPUT = false; - //Object_new.myindex = new mySQLDictionary(); + TemplateBuilder.myindex = new mySQLDictionary(); } @@ -84,7 +86,6 @@ OrderBy="ORDER BY "; try{ for(SPARQL_Term tmp : bqt.getOrderBy()) { - System.out.println("Yeah"); OrderBy=OrderBy+tmp+" "; } if((bqt.getOrderBy()).size()==0)OrderBy=""; @@ -105,13 +106,108 @@ addTemplate=false; } - Template template = new Template(condition, having, filter, selectTerm,OrderBy, limit); - - //TODO: Add Hypothesis - //TODO: Take Template like it is and change Condition - - - resultArrayList.add(template); + if(addTemplate!=false){ + + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + Template template = new Template(condition, having, filter, selectTerm,OrderBy, limit); + //TODO: Iterate over slots + ArrayList<Hypothesis> list_of_hypothesis = new ArrayList<Hypothesis>(); + for(Slot slot : bqt.getSlots()){ + + if(slot.toString().contains("USPEC")){ + String tmp= slot.toString().replace(" UNSPEC {", ""); + tmp=tmp.replace("}",""); + String[] tmp_array = tmp.split(":"); + Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "USPEC", 0); + list_of_hypothesis.add(tmp_hypothesis); + } + if(slot.toString().contains("PROPERTY")){ + String tmp= slot.toString().replace(" PROPERTY {", ""); + tmp=tmp.replace("}",""); + String[] tmp_array = tmp.split(":"); + Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "PROPERTY", 0); + list_of_hypothesis.add(tmp_hypothesis); + + } + if(slot.toString().contains("RESOURCE")){ + String tmp= slot.toString().replace(" RESOURCE {", ""); + tmp=tmp.replace("}",""); + String[] tmp_array = tmp.split(":"); + Hypothesis tmp_hypothesis = new Hypothesis("?"+tmp_array[0], tmp_array[1], "RESOURCE", 0); + list_of_hypothesis.add(tmp_hypothesis); + } + } + 
ArrayList<ArrayList<Hypothesis>> final_list_set_hypothesis = new ArrayList<ArrayList<Hypothesis>>(); + + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + for(Hypothesis x : list_of_hypothesis){ + if(x.getType().contains("RESOURCE")){ + ArrayList<String> result= new ArrayList<String>(); + try { + result = utils_new.searchIndex(x.getUri(), 3, myindex); + } catch (SQLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + for(String s : result){ + ArrayList<Hypothesis> new_list = new ArrayList<Hypothesis>(); + new_list=list_of_hypothesis; + for(Hypothesis z : new_list){ + if(z.getUri().equals(x.getUri())){ + z.setUri(s); + z.setRank(1); + } + } + final_list_set_hypothesis.add(new_list); + } + } + } + + + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + for(ArrayList<Hypothesis> x : final_list_set_hypothesis){ + for(Hypothesis h : x){ + if(h.getType().contains("PROPERTY")){ + ArrayList<String> result= new ArrayList<String>(); + try { + result = utils_new.searchIndex(h.getUri(), 1, myindex); + if(!result.isEmpty()){ + h.setUri(result.get(0)); + h.setRank(1); + } + + else{ + h.setRank(0); + } + } catch (SQLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + } + + template.setHypothesen(final_list_set_hypothesis); + //TODO: Take Template like it is and change Condition + Template template_reverse_conditions = new Template(template.getCondition(), template.getHaving(), template.getFilter(), template.getSelectTerm(), template.getOrderBy(), template.getLimit()); + + //= template; + ArrayList<ArrayList<String>> condition_template_reverse_conditions = template_reverse_conditions.getCondition(); + ArrayList<ArrayList<String>> condition_reverse_new= new ArrayList<ArrayList<String>>(); + for (ArrayList<String> x : condition_template_reverse_conditions){ + ArrayList<String> new_list = new ArrayList<String>(); + new_list.add(x.get(2)); + new_list.add(x.get(1)); + new_list.add(x.get(0)); + condition_reverse_new.add(new_list); + } + + template_reverse_conditions.setCondition(condition_reverse_new); + + resultArrayList.add(template); + resultArrayList.add(template_reverse_conditions); + } } for(Template temp : resultArrayList){ temp.printAll(); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java 2012-02-22 21:03:09 UTC (rev 3583) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/testClass_new.java 2012-02-23 15:16:14 UTC (rev 3584) @@ -16,6 +16,15 @@ TemplateBuilder testobject = new TemplateBuilder(); String question = "Which books are written by Daniele Steel?"; testobject.createTemplates(question); + + question = "Give me all german femal chancelors"; + testobject.createTemplates(question); + + question = "Who owns Aldi?"; + testobject.createTemplates(question); + + question = "In which programming language is \"GIMP\" written?"; + testobject.createTemplates(question); } } Added: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java (rev 0) +++ 
trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/utils_new.java 2012-02-23 15:16:14 UTC (rev 3584) @@ -0,0 +1,54 @@ +package org.dllearner.algorithm.tbsl.exploration.Sparql; + +import java.sql.SQLException; +import java.util.ArrayList; + +public class utils_new { + + /** + * + * @param string + * @param fall 1=Property, 0=Resource, 2=OntologyClass/Yago, 2=resource+yago+ontlogy + * @return + * @throws SQLException + */ + public static ArrayList<String> searchIndex(String string, int fall, mySQLDictionary myindex) throws SQLException{ + + String originalString=string; + string=string.replace("_", " "); + string=string.replace("-", " "); + string=string.replace(".", " "); + String result=null; + String tmp1=null; + String tmp2 = null; + ArrayList<String> result_List = new ArrayList<String>(); + + if(fall==0 || fall==3){ + + result=myindex.getResourceURI(string.toLowerCase()); + result_List.add(result); + + } + if(fall==2||fall==3){ + + tmp1=myindex.getontologyClassURI(string.toLowerCase()); + tmp2=myindex.getYagoURI(string.toLowerCase()); + if(tmp1!=null) result_List.add(tmp1); + if(tmp2!=null) result_List.add(tmp2); + } + + + if(fall==1){ + tmp1=myindex.getPropertyURI(string.toLowerCase()); + tmp2=myindex.getontologyURI(string.toLowerCase()); + if(tmp1!=null) result_List.add(tmp1); + if(tmp2!=null) result_List.add(tmp2); + + } + + return result_List; + } + + + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
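One pitfall in the RESOURCE loop of this revision: new_list = list_of_hypothesis copies the reference, not the list, so every set added to final_list_set_hypothesis is the same object, and each candidate URI overwrites the substitution made for the previous one. (The type tag is also written as "USPEC" where the Hypothesis javadoc says UNSPEC.) If one independent hypothesis set per candidate URI is intended, the elements have to be copied, which the four-argument constructor from this revision already allows; a drop-in sketch for the inner for(String s : result) loop, assuming the usual getVariable() accessor alongside the getters shown:

// Builds an independent hypothesis set per candidate URI instead of aliasing
// list_of_hypothesis; x is the enclosing RESOURCE hypothesis being resolved.
for (String s : result) {
    ArrayList<Hypothesis> newList = new ArrayList<Hypothesis>();
    for (Hypothesis z : list_of_hypothesis) {
        Hypothesis copy =
            new Hypothesis(z.getVariable(), z.getUri(), z.getType(), z.getRank());
        if (copy.getUri().equals(x.getUri())) {
            copy.setUri(s);  // substitute the index hit for the original label
            copy.setRank(1);
        }
        newList.add(copy);
    }
    final_list_set_hypothesis.add(newList);
}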
From: <lor...@us...> - 2012-02-22 21:03:15
Revision: 3583 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3583&view=rev Author: lorenz_b Date: 2012-02-22 21:03:09 +0000 (Wed, 22 Feb 2012) Log Message: ----------- Fixed problem when no root class was detected. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-22 17:56:55 UTC (rev 3582) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-22 21:03:09 UTC (rev 3583) @@ -172,6 +172,7 @@ StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner); Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses(); Set<OWLClass> derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); int rootCnt = unsatClasses.size(); int derivedCnt = derivedUnsatClasses.size(); @@ -180,6 +181,10 @@ int unsatPropCnt = unsatObjectProperties.size(); logger.info("Detected " + cnt + " unsatisfiable classes, " + rootCnt + " of them as root."); + if(unsatClasses.isEmpty()){ + unsatClasses = derivedUnsatClasses; + } + //if the ontology is not incoherent we return it here if(unsatClasses.isEmpty()){ return incoherentOntology; @@ -226,6 +231,10 @@ logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); + if(unsatClasses.isEmpty()){ + unsatClasses = derivedUnsatClasses; + } + //recompute unsatisfiable object properties if(computeParallel){ unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |