From: <lor...@us...> - 2011-12-19 13:18:39
Revision: 3507 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3507&view=rev Author: lorenz_b Date: 2011-12-19 13:18:28 +0000 (Mon, 19 Dec 2011) Log Message: ----------- Improved script. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2011-12-19 11:22:43 UTC (rev 3506) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2011-12-19 13:18:28 UTC (rev 3507) @@ -31,13 +31,15 @@ public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ - private static final int NUMBER_OF_JUSTIFICATIONS = 1; + private static final int NUMBER_OF_JUSTIFICATIONS = 2; // private PelletReasoner reasoner; private IncrementalClassifier reasoner; private OWLOntology incoherentOntology; private OWLOntology ontology; + private Map<OWLClass, OWLOntology> cls2ModuleMap; + static {PelletExplanation.setup();} @Override @@ -60,6 +62,8 @@ return incoherentOntology; } + cls2ModuleMap = extractModules(unsatClasses); + while(!unsatClasses.isEmpty()){ //for each unsatisfiable class we compute n justifications here and count how often each axiom occurs globally Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); @@ -120,17 +124,26 @@ } private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass){ - OWLOntology module = OntologyUtils.getOntologyFromAxioms( - ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)unsatClass), ModuleType.TOP_OF_BOT)); - PelletExplanation expGen = new PelletExplanation(module); + PelletExplanation expGen = new PelletExplanation(cls2ModuleMap.get(unsatClass)); return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); } + private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ + Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); + for(OWLClass cls : classes){ + OWLOntology module = OntologyUtils.getOntologyFromAxioms( + ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); + cls2ModuleMap.put(cls, module); + } + return cls2ModuleMap; + } + public static void main(String[] args) throws Exception{ Logger.getLogger(RBox.class.getName()).setLevel(Level.OFF); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("../components-core/cohaerent.owl")); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); +// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("../components-core/cohaerent.owl")); +// System.out.println(schema.getLogicalAxiomCount()); + OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); OWLOntology coherentOntology = extractor.getCoherentOntology(schema);System.out.println(coherentOntology.getLogicalAxiomCount()); This was sent by the SourceForge.net collaborative development 
platform, the world's largest Open Source development site. |
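The key change in revision 3507 is that justifications are no longer computed against the whole ontology but against a locality-based module cached per unsatisfiable class, so each PelletExplanation instance only has to reason over a small fragment. A minimal sketch of that caching pattern, reusing the calls visible in the patch (ModularityUtils.extractModule with ModuleType.TOP_OF_BOT, OntologyUtils.getOntologyFromAxioms, PelletExplanation); the variable names simply mirror the patch:

    // Sketch, assuming the Pellet/OWL API classes imported in the patch are on the classpath.
    // One TOP_OF_BOT module is extracted and cached per unsatisfiable class; the
    // justifications are then computed against the module only.
    Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>();
    for (OWLClass cls : unsatClasses) {
        OWLOntology module = OntologyUtils.getOntologyFromAxioms(
                ModularityUtils.extractModule(incoherentOntology,
                        Collections.singleton((OWLEntity) cls), ModuleType.TOP_OF_BOT));
        cls2ModuleMap.put(cls, module);
    }
    // later, for each unsatisfiable class:
    PelletExplanation expGen = new PelletExplanation(cls2ModuleMap.get(unsatClass));
    Set<Set<OWLAxiom>> explanations =
            expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS);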
From: <lor...@us...> - 2011-12-21 10:15:23
Revision: 3512 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3512&view=rev Author: lorenz_b Date: 2011-12-21 10:15:12 +0000 (Wed, 21 Dec 2011) Log Message: ----------- Extended script to firstly handle the root unsatisfiable classes. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2011-12-20 11:43:21 UTC (rev 3511) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2011-12-21 10:15:12 UTC (rev 3512) @@ -1,6 +1,9 @@ package org.dllearner.utilities; +import java.io.BufferedOutputStream; import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -11,8 +14,10 @@ import java.util.logging.Level; import java.util.logging.Logger; +import org.aksw.mole.ore.rootderived.StructureBasedRootClassFinder; import org.mindswap.pellet.RBox; import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLEntity; @@ -20,6 +25,7 @@ import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.semanticweb.owlapi.model.RemoveAxiom; import uk.ac.manchester.cs.owlapi.modularity.ModuleType; @@ -31,14 +37,14 @@ public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ - private static final int NUMBER_OF_JUSTIFICATIONS = 2; + private static final int NUMBER_OF_JUSTIFICATIONS = 5; // private PelletReasoner reasoner; private IncrementalClassifier reasoner; private OWLOntology incoherentOntology; private OWLOntology ontology; - private Map<OWLClass, OWLOntology> cls2ModuleMap; + private Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); static {PelletExplanation.setup();} @@ -54,49 +60,134 @@ OWLOntologyManager man = incoherentOntology.getOWLOntologyManager(); // man.addOntologyChangeListener(reasoner); + StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner); + rootFinder.computeRootDerivedClasses(); + Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses();//reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + int cnt = unsatClasses.size(); - Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); - //if the ontology is not incoherent we return it here if(unsatClasses.isEmpty()){ return incoherentOntology; } - + //compute the logical modules for each unsatisfiable class cls2ModuleMap = extractModules(unsatClasses); + + //compute initial explanations for each unsatisfiable class + Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations = getInitialExplanationsForUnsatClasses(unsatClasses); while(!unsatClasses.isEmpty()){ - //for each unsatisfiable class we compute n justifications here and count how often each axiom occurs globally - 
Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); - for(OWLClass unsatClass : unsatClasses){ - Set<Set<OWLAxiom>> explanations = computeExplanations(unsatClass); - for(Set<OWLAxiom> explanation : explanations){ - for(OWLAxiom ax : explanation){ - Integer cnt = axiom2CountMap.get(ax); - if(cnt == null){ - cnt = 0; - } - cnt = cnt + 1; - axiom2CountMap.put(ax, cnt); - } - } - } + //get frequency for each axiom + Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(cls2Explanations); + //get a sorted list of entries with the highest axiom count first List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); for(Entry<OWLAxiom, Integer> entry : sortedEntries){ System.out.println(entry.getKey() + ":" + entry.getValue()); } - //we remove the most occuring axiom + //we remove the most frequent axiom from the ontology OWLAxiom toRemove = sortedEntries.get(0).getKey(); + System.out.println("Removing axiom " + toRemove); man.removeAxiom(incoherentOntology, toRemove); man.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); + removeFromExplanations(cls2Explanations, toRemove); + removeFromModules(toRemove); + + //recompute the unsatisfiable classes reasoner.classify(); - unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + rootFinder = new StructureBasedRootClassFinder(reasoner); + rootFinder.computeRootDerivedClasses(); + unsatClasses = rootFinder.getRootUnsatisfiableClasses();//reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + System.out.println("Remaining unsatisfiable classes: " + unsatClasses.size()); + + //save + if(cnt - unsatClasses.size() >= 10){ + OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); + try { + toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_" + unsatClasses.size() + ".owl"))); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + cnt = unsatClasses.size(); + } + + //recompute explanations if necessary + refillExplanations(unsatClasses, cls2Explanations); + + System.gc(); } + try { + incoherentOntology.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } System.out.println(incoherentOntology.getLogicalAxiomCount()); return getOntologyWithAnnotations(incoherentOntology); } + private void removeFromModules(OWLAxiom axiom){ + OWLOntology module; + for(Entry<OWLClass, OWLOntology> entry : cls2ModuleMap.entrySet()){ + module = entry.getValue(); + module.getOWLOntologyManager().removeAxiom(module, axiom); + } + } + + private void removeFromExplanations(Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations, OWLAxiom axiom){ + for(Entry<OWLClass, Set<Set<OWLAxiom>>> entry : cls2Explanations.entrySet()){ + for (Iterator<Set<OWLAxiom>> iterator = entry.getValue().iterator(); iterator.hasNext();) { + Set<OWLAxiom> explanation = iterator.next(); + if(explanation.contains(axiom)){ + iterator.remove(); + } + } + } + } + + private void refillExplanations(Set<OWLClass> unsatClasses, Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations){ + for(OWLClass unsatClass : unsatClasses){ + Set<Set<OWLAxiom>> precomputedExplanations = cls2Explanations.get(unsatClass); + 
if(precomputedExplanations == null || precomputedExplanations.size() < NUMBER_OF_JUSTIFICATIONS){ + Set<Set<OWLAxiom>> newExplanations = computeExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); + cls2Explanations.put(unsatClass, newExplanations); + } + } + } + + private Map<OWLAxiom, Integer> getAxiomFrequency(Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations){ + Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); + + for(Entry<OWLClass, Set<Set<OWLAxiom>>> entry : cls2Explanations.entrySet()){ + for(Set<OWLAxiom> explanation : entry.getValue()){ + for(OWLAxiom ax : explanation){ + Integer cnt = axiom2CountMap.get(ax); + if(cnt == null){ + cnt = 0; + } + cnt = cnt + 1; + axiom2CountMap.put(ax, cnt); + } + } + } + + return axiom2CountMap; + } + + private Map<OWLClass, Set<Set<OWLAxiom>>> getInitialExplanationsForUnsatClasses(Set<OWLClass> unsatClasses){ + Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations = new HashMap<OWLClass, Set<Set<OWLAxiom>>>(); + + for(OWLClass unsatClass : unsatClasses){ + Set<Set<OWLAxiom>> explanations = computeExplanations(unsatClass); + cls2Explanations.put(unsatClass, explanations); + } + + return cls2Explanations; + } + private OWLOntology getOntologyWithoutAnnotations(OWLOntology ontology){ try { OWLOntologyManager man = ontology.getOWLOntologyManager(); @@ -124,15 +215,29 @@ } private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass){ - PelletExplanation expGen = new PelletExplanation(cls2ModuleMap.get(unsatClass)); + PelletExplanation expGen = new PelletExplanation(getModule(unsatClass)); return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); } + private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass, int limit){ + PelletExplanation expGen = new PelletExplanation(getModule(unsatClass)); + return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); + } + + private OWLOntology getModule(OWLClass cls){ + OWLOntology module = cls2ModuleMap.get(cls); + if(module == null){ + module = OntologyUtils.getOntologyFromAxioms( + ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); + cls2ModuleMap.put(cls, module); + } + return module; + } + private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); for(OWLClass cls : classes){ - OWLOntology module = OntologyUtils.getOntologyFromAxioms( - ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); + OWLOntology module = getModule(cls); cls2ModuleMap.put(cls, module); } return cls2ModuleMap; @@ -144,6 +249,9 @@ // OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("../components-core/cohaerent.owl")); // System.out.println(schema.getLogicalAxiomCount()); OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); +// System.out.println(schema.getLogicalAxiomCount()); +// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("log/dbpedia_coherent.owl")); + System.out.println(schema.getLogicalAxiomCount()); JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); OWLOntology coherentOntology = extractor.getCoherentOntology(schema);System.out.println(coherentOntology.getLogicalAxiomCount()); This was sent by the SourceForge.net collaborative development platform, the world's 
largest Open Source development site. |
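The loop introduced in revision 3512 repairs the ontology greedily: it keeps a pool of justifications per root unsatisfiable class, counts how often each axiom occurs across the whole pool, and removes the globally most frequent axiom before reclassifying. A sketch of the selection step, mirroring getAxiomFrequency and the MapUtils.sortByValues helper used in the patch:

    // Count how often each axiom appears over all cached justifications and pick
    // the globally most frequent one for removal (mirrors getAxiomFrequency(...)).
    Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>();
    for (Set<Set<OWLAxiom>> explanations : cls2Explanations.values()) {
        for (Set<OWLAxiom> explanation : explanations) {
            for (OWLAxiom ax : explanation) {
                Integer cnt = axiom2CountMap.get(ax);
                axiom2CountMap.put(ax, (cnt == null) ? 1 : cnt + 1);
            }
        }
    }
    // MapUtils.sortByValues is the DL-Learner helper used in the patch; the entry
    // with the highest count comes first.
    List<Map.Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap);
    OWLAxiom toRemove = sortedEntries.get(0).getKey();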
From: <lor...@us...> - 2012-02-01 14:31:35
Revision: 3562 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3562&view=rev Author: lorenz_b Date: 2012-02-01 14:31:26 +0000 (Wed, 01 Feb 2012) Log Message: ----------- Started integration of debugging unsatisfiable object properties. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-01 11:36:50 UTC (rev 3561) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-01 14:31:26 UTC (rev 3562) @@ -11,6 +11,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.SortedSet; +import java.util.TreeSet; import java.util.Map.Entry; import java.util.Set; import java.util.logging.Level; @@ -29,10 +31,12 @@ import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLEntity; import org.semanticweb.owlapi.model.OWLLogicalAxiom; +import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.RemoveAxiom; import org.semanticweb.owlapi.reasoner.IllegalConfigurationException; import org.semanticweb.owlapi.reasoner.OWLReasoner; @@ -47,6 +51,7 @@ import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; import com.clarkparsia.owlapiv3.OntologyUtils; +import com.clarkparsia.pellet.owlapiv3.PelletReasoner; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -62,10 +67,14 @@ private OWLDataFactory factory; private Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); + private Map<OWLEntity, OWLOntology> entity2ModuleMap = new HashMap<OWLEntity, OWLOntology>(); private OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); MessageDigest md5; + private Set<OWLTransitiveObjectPropertyAxiom> removedTransitiveAxioms; + private Set<OWLObjectProperty> unsatObjectProperties; + public JustificationBasedCoherentOntologyExtractor() { try { md5 = MessageDigest.getInstance("MD5"); @@ -83,25 +92,34 @@ @Override public OWLOntology getCoherentOntology(OWLOntology ontology, boolean preferRoots){ - if(preferRoots){ - return computeCoherentOntologyRootBased(ontology); - } else { - return computeCoherentOntology(ontology); - } - } - - private OWLOntology computeCoherentOntologyRootBased(OWLOntology ontology) { this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); //only for debugging - ontology.getOWLOntologyManager().removeAxioms(ontology, ontology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY)); + removedTransitiveAxioms = ontology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); + incoherentOntology.getOWLOntologyManager().removeAxioms(ontology, removedTransitiveAxioms); + manager = incoherentOntology.getOWLOntologyManager(); + factory = manager.getOWLDataFactory(); + long 
startTime = System.currentTimeMillis(); reasoner = new IncrementalClassifier(incoherentOntology); reasoner.classify(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + //compute the unsatisfiable object properties + unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + + //start main process, either preferring root classes or not + if(preferRoots){ + return computeCoherentOntologyRootBased(incoherentOntology); + } else { + return computeCoherentOntology(incoherentOntology); + } + } + + private OWLOntology computeCoherentOntologyRootBased(OWLOntology ontology) { + // startTime = System.currentTimeMillis(); // hermitReasoner = new Reasoner(incoherentOntology); // hermitReasoner.classifyClasses(); @@ -113,7 +131,7 @@ //compute the unsatisfiable classes logger.info("Computing root/derived unsatisfiable classes..."); - startTime = System.currentTimeMillis(); + long startTime = System.currentTimeMillis(); StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner); Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses(); Set<OWLClass> derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); @@ -179,14 +197,7 @@ //save if(cnt - (rootCnt+derivedCnt) >= 10){ cnt = rootCnt + derivedCnt; - OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); - try { - toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_" + cnt + ".owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } + save("log/dbpedia_" + cnt + ".owl"); cnt = rootCnt + derivedCnt; } @@ -211,23 +222,9 @@ } private OWLOntology computeCoherentOntology(OWLOntology ontology) { - this.ontology = ontology; - this.incoherentOntology = getOntologyWithoutAnnotations(ontology); - - //only for debugging - ontology.getOWLOntologyManager().removeAxioms(ontology, ontology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY)); - - long startTime = System.currentTimeMillis(); - reasoner = new IncrementalClassifier(incoherentOntology); - reasoner.classify(); - logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - - OWLOntologyManager man = incoherentOntology.getOWLOntologyManager(); - factory = man.getOWLDataFactory(); - //compute the unsatisfiable classes logger.info("Computing unsatisfiable classes..."); - startTime = System.currentTimeMillis(); + long startTime = System.currentTimeMillis(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); int cnt = unsatClasses.size(); @@ -247,6 +244,7 @@ logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations = getInitialExplanationsForUnsatClasses(unsatClasses); + Map<OWLObjectProperty, Set<Set<OWLAxiom>>> prop2Explanations = getInitialExplanationsForUnsatObjectProperties(unsatObjectProperties); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); while(!unsatClasses.isEmpty()){ @@ -261,8 +259,8 @@ //we remove the most frequent axiom from the ontology OWLAxiom toRemove = sortedEntries.get(0).getKey(); logger.info("Removing axiom " + toRemove + "."); - man.removeAxiom(incoherentOntology, toRemove); - man.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); + 
manager.removeAxiom(incoherentOntology, toRemove); + manager.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); removeFromExplanations(cls2Explanations, toRemove); removeFromModules(toRemove); @@ -277,14 +275,7 @@ //save if(cnt - unsatClasses.size() >= 10){ cnt = unsatClasses.size(); - OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); - try { - toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_" + cnt + ".owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } + save("log/dbpedia_" + cnt + ".owl"); } //recompute explanations if necessary @@ -307,6 +298,30 @@ return getOntologyWithAnnotations(incoherentOntology); } + private void save(String fileName){ + OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); + try { + toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + } + + private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(IncrementalClassifier reasoner){ + SortedSet<OWLObjectProperty> properties = new TreeSet<OWLObjectProperty>(); + OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); + for(OWLObjectProperty p : reasoner.getRootOntology().getObjectPropertiesInSignature()){ +// boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectExactCardinality(1, p)); + boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectSomeValuesFrom(p, factory.getOWLThing())); + if(!satisfiable){ + properties.add(p); + } + } + return properties; + } + private void removeFromModules(OWLAxiom axiom){ OWLOntology module; for(Entry<OWLClass, OWLOntology> entry : cls2ModuleMap.entrySet()){ @@ -366,6 +381,17 @@ return cls2Explanations; } + private Map<OWLObjectProperty, Set<Set<OWLAxiom>>> getInitialExplanationsForUnsatObjectProperties(Set<OWLObjectProperty> unsatObjProperties){ + Map<OWLObjectProperty, Set<Set<OWLAxiom>>> prop2Explanations = new HashMap<OWLObjectProperty, Set<Set<OWLAxiom>>>(); + + for(OWLObjectProperty unsatClass : unsatObjProperties){ + Set<Set<OWLAxiom>> explanations = computeExplanations(unsatClass); + prop2Explanations.put(unsatClass, explanations); + } + + return prop2Explanations; + } + private OWLOntology getOntologyWithoutAnnotations(OWLOntology ontology){ try { OWLOntologyManager man = ontology.getOWLOntologyManager(); @@ -397,6 +423,11 @@ return expGen.getUnsatisfiableExplanations(unsatClass, numberOfJustifications); } + private Set<Set<OWLAxiom>> computeExplanations(OWLObjectProperty unsatProp){ + PelletExplanation expGen = new PelletExplanation(getModule(unsatProp)); + return expGen.getUnsatisfiableExplanations(factory.getOWLObjectExactCardinality(1, unsatProp), numberOfJustifications); + } + private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass, int limit){ PelletExplanation expGen = new PelletExplanation(getModule(unsatClass)); return expGen.getUnsatisfiableExplanations(unsatClass, numberOfJustifications); @@ -415,7 +446,7 @@ // return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); // } - private OWLOntology getModule(OWLClass cls){System.out.println(cls); + private OWLOntology getModule(OWLClass cls){ OWLOntology module = cls2ModuleMap.get(cls); 
new File("log").mkdir(); if(module == null){ @@ -443,6 +474,35 @@ return module; } + private OWLOntology getModule(OWLEntity entity){ + OWLOntology module = entity2ModuleMap.get(entity); + new File("log").mkdir(); + if(module == null){ + md5.reset(); + md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + entity.toStringID()).getBytes()); + String hash = MD5.asHex(md5.digest()); + String filename = "log/" + hash + ".owl"; + File file = new File(filename); + if(file.exists()){ + module = loadModule(file); + } else { + module = OntologyUtils.getOntologyFromAxioms( + ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); + try { + manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + } + + entity2ModuleMap.put(entity, module); + } + return module; + } + + private OWLOntology loadModule(File file){ OWLOntology module = null; try { @@ -453,13 +513,31 @@ return module; } - private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ - Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); - for(OWLClass cls : classes){ - OWLOntology module = getModule(cls); - cls2ModuleMap.put(cls, module); +// private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ +// Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); +// for(OWLClass cls : classes){ +// OWLOntology module = getModule(cls); +// cls2ModuleMap.put(cls, module); +// } +// return cls2ModuleMap; +// } +// +// private Map<OWLObjectProperty, OWLOntology> extractModules(Set<OWLObjectProperty> objectProperties){ +// Map<OWLObjectProperty, OWLOntology> prop2ModuleMap = new HashMap<OWLObjectProperty, OWLOntology>(); +// for(OWLObjectProperty prop : objectProperties){ +// OWLOntology module = getModule(prop); +// prop2ModuleMap.put(prop, module); +// } +// return prop2ModuleMap; +// } + + private <T extends OWLEntity> Map<T, OWLOntology> extractModules(Set<T> entities){ + Map<T, OWLOntology> entity2ModuleMap = new HashMap<T, OWLOntology>(); + for(T entity : entities){ + OWLOntology module = getModule(entity); + entity2ModuleMap.put(entity, module); } - return cls2ModuleMap; + return entity2ModuleMap; } public void setNumberOfJustifications(int numberOfJustifications) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-02-02 13:17:22
Revision: 3563 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3563&view=rev Author: lorenz_b Date: 2012-02-02 13:17:12 +0000 (Thu, 02 Feb 2012) Log Message: ----------- Continued integration of debugging unsatisfiable object properties. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-01 14:31:26 UTC (rev 3562) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-02 13:17:12 UTC (rev 3563) @@ -66,7 +66,6 @@ private OWLOntology ontology; private OWLDataFactory factory; - private Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); private Map<OWLEntity, OWLOntology> entity2ModuleMap = new HashMap<OWLEntity, OWLOntology>(); private OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); @@ -96,8 +95,8 @@ this.incoherentOntology = getOntologyWithoutAnnotations(ontology); //only for debugging - removedTransitiveAxioms = ontology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); - incoherentOntology.getOWLOntologyManager().removeAxioms(ontology, removedTransitiveAxioms); + removedTransitiveAxioms = incoherentOntology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); + incoherentOntology.getOWLOntologyManager().removeAxioms(incoherentOntology, removedTransitiveAxioms); manager = incoherentOntology.getOWLOntologyManager(); factory = manager.getOWLDataFactory(); @@ -107,8 +106,10 @@ reasoner.classify(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - //compute the unsatisfiable object properties + //compute the unsatisfiable object properties and their corresponding modules unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + logger.info("Found unsatisfiable object properties: " + unsatObjectProperties.size()); + entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); //start main process, either preferring root classes or not if(preferRoots){ @@ -147,20 +148,22 @@ return incoherentOntology; } //compute the logical modules for each unsatisfiable class - logger.info("Computing module for each unsatisfiable class..."); + logger.info("Computing module for each unsatisfiable entity..."); startTime = System.currentTimeMillis(); - cls2ModuleMap = extractModules(unsatClasses); + entity2ModuleMap.putAll(extractModules(unsatClasses)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations = getInitialExplanationsForUnsatClasses(unsatClasses); + Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); + entity2Explanations.putAll(getInitialExplanations(unsatClasses)); + entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); while(!unsatClasses.isEmpty()){ //get frequency for each axiom - Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(cls2Explanations); + Map<OWLAxiom, Integer> 
axiom2CountMap = getAxiomFrequency(entity2Explanations); //get a sorted list of entries with the highest axiom count first List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); @@ -172,7 +175,7 @@ logger.info("Removing axiom " + toRemove + "."); man.removeAxiom(incoherentOntology, toRemove); man.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); - removeFromExplanations(cls2Explanations, toRemove); + removeFromExplanations(entity2Explanations, toRemove); removeFromModules(toRemove); //recompute the unsatisfiable classes @@ -194,6 +197,10 @@ logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); + //recompute unsatisfiable object properties + unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + //save if(cnt - (rootCnt+derivedCnt) >= 10){ cnt = rootCnt + derivedCnt; @@ -204,7 +211,8 @@ //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); - refillExplanations(unsatClasses, cls2Explanations); + refillExplanations(unsatClasses, entity2Explanations); + refillExplanations(unsatObjectProperties, entity2Explanations); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); @@ -237,19 +245,20 @@ //compute the logical modules for each unsatisfiable class logger.info("Computing module for each unsatisfiable class..."); startTime = System.currentTimeMillis(); - cls2ModuleMap = extractModules(unsatClasses); + entity2ModuleMap = extractModules(unsatClasses); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations = getInitialExplanationsForUnsatClasses(unsatClasses); - Map<OWLObjectProperty, Set<Set<OWLAxiom>>> prop2Explanations = getInitialExplanationsForUnsatObjectProperties(unsatObjectProperties); + Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); + entity2Explanations.putAll(getInitialExplanations(unsatClasses)); + entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); while(!unsatClasses.isEmpty()){ //get frequency for each axiom - Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(cls2Explanations); + Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); //get a sorted list of entries with the highest axiom count first List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); @@ -261,7 +270,7 @@ logger.info("Removing axiom " + toRemove + "."); manager.removeAxiom(incoherentOntology, toRemove); manager.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); - removeFromExplanations(cls2Explanations, toRemove); + removeFromExplanations(entity2Explanations, toRemove); removeFromModules(toRemove); //recompute the unsatisfiable classes @@ -272,6 +281,10 @@ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); + //recompute unsatisfiable object properties + unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + logger.info("Remaining unsatisfiable object properties: 
" + unsatObjectProperties.size()); + //save if(cnt - unsatClasses.size() >= 10){ cnt = unsatClasses.size(); @@ -281,7 +294,8 @@ //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); - refillExplanations(unsatClasses, cls2Explanations); + refillExplanations(unsatClasses, entity2Explanations); + refillExplanations(unsatObjectProperties, entity2Explanations); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); @@ -310,6 +324,8 @@ } private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(IncrementalClassifier reasoner){ + logger.info("Computing unsatisfiable object properties..."); + long startTime = System.currentTimeMillis(); SortedSet<OWLObjectProperty> properties = new TreeSet<OWLObjectProperty>(); OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); for(OWLObjectProperty p : reasoner.getRootOntology().getObjectPropertiesInSignature()){ @@ -319,19 +335,20 @@ properties.add(p); } } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); return properties; } private void removeFromModules(OWLAxiom axiom){ OWLOntology module; - for(Entry<OWLClass, OWLOntology> entry : cls2ModuleMap.entrySet()){ + for(Entry<? extends OWLEntity, OWLOntology> entry : entity2ModuleMap.entrySet()){ module = entry.getValue(); module.getOWLOntologyManager().removeAxiom(module, axiom); } } - private void removeFromExplanations(Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations, OWLAxiom axiom){ - for(Entry<OWLClass, Set<Set<OWLAxiom>>> entry : cls2Explanations.entrySet()){ + private void removeFromExplanations(Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations, OWLAxiom axiom){ + for(Entry<OWLEntity, Set<Set<OWLAxiom>>> entry : entity2Explanations.entrySet()){ for (Iterator<Set<OWLAxiom>> iterator = entry.getValue().iterator(); iterator.hasNext();) { Set<OWLAxiom> explanation = iterator.next(); if(explanation.contains(axiom)){ @@ -341,20 +358,20 @@ } } - private void refillExplanations(Set<OWLClass> unsatClasses, Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations){ - for(OWLClass unsatClass : unsatClasses){ - Set<Set<OWLAxiom>> precomputedExplanations = cls2Explanations.get(unsatClass); + private void refillExplanations(Set<? 
extends OWLEntity> unsatEntities, Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations){ + for(OWLEntity unsatClass : unsatEntities){ + Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatClass); if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ Set<Set<OWLAxiom>> newExplanations = computeExplanations(unsatClass, numberOfJustifications); - cls2Explanations.put(unsatClass, newExplanations); + entity2Explanations.put(unsatClass, newExplanations); } } } - private Map<OWLAxiom, Integer> getAxiomFrequency(Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations){ + private Map<OWLAxiom, Integer> getAxiomFrequency(Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations){ Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); - for(Entry<OWLClass, Set<Set<OWLAxiom>>> entry : cls2Explanations.entrySet()){ + for(Entry<OWLEntity, Set<Set<OWLAxiom>>> entry : entity2Explanations.entrySet()){ for(Set<OWLAxiom> explanation : entry.getValue()){ for(OWLAxiom ax : explanation){ Integer cnt = axiom2CountMap.get(ax); @@ -370,28 +387,22 @@ return axiom2CountMap; } - private Map<OWLClass, Set<Set<OWLAxiom>>> getInitialExplanationsForUnsatClasses(Set<OWLClass> unsatClasses){ - Map<OWLClass, Set<Set<OWLAxiom>>> cls2Explanations = new HashMap<OWLClass, Set<Set<OWLAxiom>>>(); + private Map<OWLEntity, Set<Set<OWLAxiom>>> getInitialExplanations(Set<? extends OWLEntity> unsatEntities){ + Map<OWLEntity, Set<Set<OWLAxiom>>> cls2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); - for(OWLClass unsatClass : unsatClasses){ - Set<Set<OWLAxiom>> explanations = computeExplanations(unsatClass); - cls2Explanations.put(unsatClass, explanations); + for(OWLEntity unsatEntity : unsatEntities){ + Set<Set<OWLAxiom>> explanations = null; + if(unsatEntity instanceof OWLClass){ + explanations = computeExplanations((OWLClass) unsatEntity); + } else if(unsatEntity instanceof OWLObjectProperty){ + explanations = computeExplanations((OWLObjectProperty) unsatEntity); + } + cls2Explanations.put(unsatEntity, explanations); } return cls2Explanations; } - private Map<OWLObjectProperty, Set<Set<OWLAxiom>>> getInitialExplanationsForUnsatObjectProperties(Set<OWLObjectProperty> unsatObjProperties){ - Map<OWLObjectProperty, Set<Set<OWLAxiom>>> prop2Explanations = new HashMap<OWLObjectProperty, Set<Set<OWLAxiom>>>(); - - for(OWLObjectProperty unsatClass : unsatObjProperties){ - Set<Set<OWLAxiom>> explanations = computeExplanations(unsatClass); - prop2Explanations.put(unsatClass, explanations); - } - - return prop2Explanations; - } - private OWLOntology getOntologyWithoutAnnotations(OWLOntology ontology){ try { OWLOntologyManager man = ontology.getOWLOntologyManager(); @@ -418,20 +429,26 @@ return ontology; } - private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass){ - PelletExplanation expGen = new PelletExplanation(getModule(unsatClass)); - return expGen.getUnsatisfiableExplanations(unsatClass, numberOfJustifications); + private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity){ + PelletExplanation expGen = new PelletExplanation(getModule(unsatEntity)); + if(unsatEntity instanceof OWLClass){ + return expGen.getUnsatisfiableExplanations((OWLClass) unsatEntity, numberOfJustifications); + } else if(unsatEntity instanceof OWLObjectProperty){ + return expGen.getUnsatisfiableExplanations(factory.getOWLObjectExactCardinality(1, (OWLObjectProperty)unsatEntity), numberOfJustifications); + } + return null; } - private Set<Set<OWLAxiom>> 
computeExplanations(OWLObjectProperty unsatProp){ - PelletExplanation expGen = new PelletExplanation(getModule(unsatProp)); - return expGen.getUnsatisfiableExplanations(factory.getOWLObjectExactCardinality(1, unsatProp), numberOfJustifications); + private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity, int limit){ + PelletExplanation expGen = new PelletExplanation(getModule(unsatEntity)); + if(unsatEntity instanceof OWLClass){ + return expGen.getUnsatisfiableExplanations((OWLClass) unsatEntity, limit); + } else if(unsatEntity instanceof OWLObjectProperty){ + return expGen.getUnsatisfiableExplanations(factory.getOWLObjectExactCardinality(1, (OWLObjectProperty)unsatEntity), limit); + } + return null; } - private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass, int limit){ - PelletExplanation expGen = new PelletExplanation(getModule(unsatClass)); - return expGen.getUnsatisfiableExplanations(unsatClass, numberOfJustifications); - } private Set<Set<OWLAxiom>> computeExplanationsBlackBox(OWLClass unsatClass, int limit){ BlackBoxExplanation singleExpGen = new BlackBoxExplanation(incoherentOntology, new HermiTReasonerFactory(), hermitReasoner); @@ -446,33 +463,33 @@ // return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); // } - private OWLOntology getModule(OWLClass cls){ - OWLOntology module = cls2ModuleMap.get(cls); - new File("log").mkdir(); - if(module == null){ - md5.reset(); - md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + cls.toStringID()).getBytes()); - String hash = MD5.asHex(md5.digest()); - String filename = "log/" + hash + ".owl"; - File file = new File(filename); - if(file.exists()){ - module = loadModule(file); - } else { - module = OntologyUtils.getOntologyFromAxioms( - ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); - try { - manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } - } - - cls2ModuleMap.put(cls, module); - } - return module; - } +// private OWLOntology getModule(OWLClass cls){ +// OWLOntology module = cls2ModuleMap.get(cls); +// new File("log").mkdir(); +// if(module == null){ +// md5.reset(); +// md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + cls.toStringID()).getBytes()); +// String hash = MD5.asHex(md5.digest()); +// String filename = "log/" + hash + ".owl"; +// File file = new File(filename); +// if(file.exists()){ +// module = loadModule(file); +// } else { +// module = OntologyUtils.getOntologyFromAxioms( +// ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); +// try { +// manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); +// } catch (OWLOntologyStorageException e) { +// e.printStackTrace(); +// } catch (FileNotFoundException e) { +// e.printStackTrace(); +// } +// } +// +// cls2ModuleMap.put(cls, module); +// } +// return module; +// } private OWLOntology getModule(OWLEntity entity){ OWLOntology module = entity2ModuleMap.get(entity); @@ -531,12 +548,16 @@ // return prop2ModuleMap; // } - private <T extends OWLEntity> Map<T, OWLOntology> extractModules(Set<T> entities){ - Map<T, OWLOntology> entity2ModuleMap = new HashMap<T, OWLOntology>(); 
- for(T entity : entities){ + private Map<OWLEntity, OWLOntology> extractModules(Set<? extends OWLEntity> entities){ + logger.info("Computing modules..."); + long startTime = System.currentTimeMillis(); + Map<OWLEntity, OWLOntology> entity2ModuleMap = new HashMap<OWLEntity, OWLOntology>(); + for(OWLEntity entity : entities){ + logger.info(" for " + entity.toStringID()); OWLOntology module = getModule(entity); entity2ModuleMap.put(entity, module); } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); return entity2ModuleMap; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
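Module extraction is the most expensive step here, so the patch also caches each entity module on disk: the file name is the MD5 hash of the ontology document IRI concatenated with the entity IRI, and a module is only recomputed when no such file exists. A condensed sketch of that cache, following getModule(OWLEntity) in the patch (the openlink MD5 helper and the log/ directory are the ones used there):

    // On-disk cache for entity modules: key = MD5(ontologyDocumentIRI + entityIRI),
    // stored as log/<hash>.owl (condensed from getModule(OWLEntity) in the patch).
    private OWLOntology getModule(OWLEntity entity) {
        OWLOntology module = entity2ModuleMap.get(entity);
        if (module == null) {
            new File("log").mkdir();
            md5.reset();
            md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString()
                    + entity.toStringID()).getBytes());
            File file = new File("log/" + MD5.asHex(md5.digest()) + ".owl");
            if (file.exists()) {
                module = loadModule(file);   // reuse a previously saved module
            } else {
                module = OntologyUtils.getOntologyFromAxioms(
                        ModularityUtils.extractModule(incoherentOntology,
                                Collections.singleton(entity), ModuleType.TOP_OF_BOT));
                // the patch additionally saves the freshly extracted module to 'file' here
            }
            entity2ModuleMap.put(entity, module);
        }
        return module;
    }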
From: <lor...@us...> - 2012-02-06 09:38:33
Revision: 3566 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3566&view=rev Author: lorenz_b Date: 2012-02-06 09:38:22 +0000 (Mon, 06 Feb 2012) Log Message: ----------- Saving diff. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-05 18:27:34 UTC (rev 3565) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-06 09:38:22 UTC (rev 3566) @@ -26,6 +26,7 @@ import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; +import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLDataFactory; @@ -56,7 +57,8 @@ public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ private static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(JustificationBasedCoherentOntologyExtractor.class); - + private static final String DIFF_ONTOLOGY_NAME = "diff.owl"; + private int numberOfJustifications = 10; // private PelletReasoner reasoner; private IncrementalClassifier reasoner; @@ -66,7 +68,11 @@ private OWLOntology ontology; private OWLDataFactory factory; + //we store the removed axioms in it + private OWLOntology diffOntology; + private Map<OWLEntity, OWLOntology> entity2ModuleMap = new HashMap<OWLEntity, OWLOntology>(); + private Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); private OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); MessageDigest md5; @@ -94,6 +100,17 @@ this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); + File diffFile = new File(new File(ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toURI()).getParent() + "/" + DIFF_ONTOLOGY_NAME); + try {System.out.println(diffFile); + if(diffFile.exists()){ + diffOntology = manager.loadOntologyFromOntologyDocument(diffFile); + } else { + diffOntology = manager.createOntology(IRI.create("http://diff.org/")); + } + } catch (OWLOntologyCreationException e1) { + e1.printStackTrace(); + } + //only for debugging removedTransitiveAxioms = incoherentOntology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); incoherentOntology.getOWLOntologyManager().removeAxioms(incoherentOntology, removedTransitiveAxioms); @@ -141,6 +158,7 @@ int derivedCnt = derivedUnsatClasses.size(); // Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); int cnt = rootCnt + derivedCnt; + int unsatPropCnt = unsatObjectProperties.size(); logger.info("Detected " + cnt + " unsatisfiable classes, " + rootCnt + " of them as root."); //if the ontology is not incoherent we return it here @@ -156,7 +174,6 @@ //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); 
entity2Explanations.putAll(getInitialExplanations(unsatClasses)); entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -172,11 +189,7 @@ } //we remove the most frequent axiom from the ontology OWLAxiom toRemove = sortedEntries.get(0).getKey(); - logger.info("Removing axiom " + toRemove + "."); - man.removeAxiom(incoherentOntology, toRemove); - man.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); - removeFromExplanations(entity2Explanations, toRemove); - removeFromModules(toRemove); + removeAxiom(toRemove); //recompute the unsatisfiable classes logger.info("Reclassifying..."); @@ -202,10 +215,11 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if(cnt - (rootCnt+derivedCnt) >= 10){ + if(cnt - (rootCnt+derivedCnt) >= 1 || (unsatPropCnt - unsatObjectProperties.size()) >= 1){ cnt = rootCnt + derivedCnt; - save("log/dbpedia_" + cnt + ".owl"); + save("log/dbpedia_" + cnt + "cls" + unsatPropCnt + "prop.owl"); cnt = rootCnt + derivedCnt; + unsatPropCnt = unsatObjectProperties.size(); } //recompute explanations if necessary @@ -251,7 +265,6 @@ //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); entity2Explanations.putAll(getInitialExplanations(unsatClasses)); entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -267,11 +280,7 @@ } //we remove the most frequent axiom from the ontology OWLAxiom toRemove = sortedEntries.get(0).getKey(); - logger.info("Removing axiom " + toRemove + "."); - manager.removeAxiom(incoherentOntology, toRemove); - manager.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); - removeFromExplanations(entity2Explanations, toRemove); - removeFromModules(toRemove); + removeAxiom(toRemove); //recompute the unsatisfiable classes logger.info("Reclassifying..."); @@ -312,10 +321,20 @@ return getOntologyWithAnnotations(incoherentOntology); } + private void removeAxiom(OWLAxiom axiom){ + logger.info("Removing axiom " + axiom + "."); + manager.removeAxiom(incoherentOntology, axiom); + manager.addAxiom(diffOntology, axiom); + manager.applyChange(new RemoveAxiom(incoherentOntology, axiom)); + removeFromExplanations(entity2Explanations, axiom); + removeFromModules(axiom); + } + private void save(String fileName){ OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); try { toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); + toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/diff.owl"))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
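From revision 3566 on, every removed axiom is also recorded in a separate diff ontology, which is loaded from diff.owl next to the input ontology if it already exists and is written out together with each snapshot. The bookkeeping, condensed from removeAxiom(...) in the patch (removing via the manager and via applyChange(new RemoveAxiom(...)) is equivalent, so only one call is shown):

    // Remove an axiom from the working ontology and record it in the diff ontology.
    private void removeAxiom(OWLAxiom axiom) {
        logger.info("Removing axiom " + axiom + ".");
        manager.removeAxiom(incoherentOntology, axiom);
        manager.addAxiom(diffOntology, axiom);          // keep track of everything that was removed
        removeFromExplanations(entity2Explanations, axiom);
        removeFromModules(axiom);
    }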
From: <lor...@us...> - 2012-02-07 09:48:13
Revision: 3573 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3573&view=rev Author: lorenz_b Date: 2012-02-07 09:48:07 +0000 (Tue, 07 Feb 2012) Log Message: ----------- Added option to debug classes and properties in one step or sequentially. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-06 20:22:07 UTC (rev 3572) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-07 09:48:07 UTC (rev 3573) @@ -80,6 +80,9 @@ private Set<OWLTransitiveObjectPropertyAxiom> removedTransitiveAxioms; private Set<OWLObjectProperty> unsatObjectProperties; + //whether to debug classes and properties in parallel + private boolean computeParallel = false; + public JustificationBasedCoherentOntologyExtractor() { try { md5 = MessageDigest.getInstance("MD5"); @@ -101,7 +104,7 @@ this.incoherentOntology = getOntologyWithoutAnnotations(ontology); File diffFile = new File(new File(ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toURI()).getParent() + "/" + DIFF_ONTOLOGY_NAME); - try {System.out.println(diffFile); + try { if(diffFile.exists()){ diffOntology = manager.loadOntologyFromOntologyDocument(diffFile); } else { @@ -175,7 +178,9 @@ logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); entity2Explanations.putAll(getInitialExplanations(unsatClasses)); - entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + if(computeParallel){ + entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); while(!unsatClasses.isEmpty()){ @@ -211,8 +216,10 @@ logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); //recompute unsatisfiable object properties - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); - logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + if(computeParallel){ + unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + } //save if(cnt - (rootCnt+derivedCnt) >= 1 || (unsatPropCnt - unsatObjectProperties.size()) >= 1){ @@ -226,11 +233,52 @@ logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); refillExplanations(unsatClasses, entity2Explanations); - refillExplanations(unsatObjectProperties, entity2Explanations); + if(computeParallel){ + refillExplanations(unsatObjectProperties, entity2Explanations); + } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); } + if(!computeParallel){ + unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + while(!unsatObjectProperties.isEmpty()){ + //get frequency for each axiom + Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); + + //get a sorted list of entries with the highest axiom count first + List<Entry<OWLAxiom, Integer>> 
sortedEntries = MapUtils.sortByValues(axiom2CountMap); + //we remove the most frequent axiom from the ontology + OWLAxiom toRemove = sortedEntries.get(0).getKey(); + removeAxiom(toRemove); + + //recompute the unsatisfiable classes + logger.info("Reclassifying..."); + startTime = System.currentTimeMillis(); + reasoner.classify(); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + + //recompute unsatisfiable object properties + unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + + //save + if((unsatPropCnt - unsatObjectProperties.size()) >= 1){ + save("log/dbpedia_" + cnt + "cls" + unsatPropCnt + "prop.owl"); + unsatPropCnt = unsatObjectProperties.size(); + } + + //recompute explanations if necessary + logger.info("Recomputing explanations..."); + startTime = System.currentTimeMillis(); + refillExplanations(unsatObjectProperties, entity2Explanations); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + + System.gc(); + } + } + try { incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); } catch (OWLOntologyStorageException e) { @@ -238,7 +286,6 @@ } catch (FileNotFoundException e) { e.printStackTrace(); } - System.out.println(incoherentOntology.getLogicalAxiomCount()); return getOntologyWithAnnotations(incoherentOntology); } @@ -584,17 +631,22 @@ this.numberOfJustifications = numberOfJustifications; } + public void setComputeParallel(boolean computeParallel) { + this.computeParallel = computeParallel; + } + public static void main(String[] args) throws Exception{ Logger.getLogger(RBox.class.getName()).setLevel(Level.OFF); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - if(args.length != 3){ - System.out.println("USAGE: JustificationBasedCoherentOntologyExtractor <incoherent.owl> <numberOfJustifcations> <preferRootClasses(true|false)>"); + if(args.length != 4){ + System.out.println("USAGE: JustificationBasedCoherentOntologyExtractor <incoherent.owl> <numberOfJustifcations> <preferRootClasses(true|false)> <computeParallel(true|false)>"); System.exit(0); } String filename = args[0]; int numberOfJustifications = Integer.parseInt(args[1]); boolean preferRoots = Boolean.valueOf(args[2]); + boolean computeParallel = Boolean.valueOf(args[3]); System.out.println("Loading ontology..."); File file = new File(filename); @@ -620,6 +672,7 @@ JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); extractor.setNumberOfJustifications(numberOfJustifications); + extractor.setComputeParallel(computeParallel); OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
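Revision 3573 adds a switch between two repair strategies: with computeParallel set to true, classes and object properties are debugged in one combined loop; with false, the classes are repaired first and a second loop then handles the remaining unsatisfiable object properties. A usage sketch for the extended entry point, following the argument order expected by main() in this revision (file name and values are illustrative):

    // Command line: JustificationBasedCoherentOntologyExtractor <incoherent.owl>
    //   <numberOfJustifications> <preferRootClasses(true|false)> <computeParallel(true|false)>
    // Programmatic equivalent:
    JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor();
    extractor.setNumberOfJustifications(10);   // justifications kept per unsatisfiable entity
    extractor.setComputeParallel(false);       // false = classes first, then object properties
    OWLOntology coherentOntology = extractor.getCoherentOntology(schema, true); // true = prefer root classes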
From: <lor...@us...> - 2012-02-07 09:53:39
Revision: 3574 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3574&view=rev Author: lorenz_b Date: 2012-02-07 09:53:33 +0000 (Tue, 07 Feb 2012) Log Message: ----------- Continued option to debug classes and properties in one step or sequentially. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-07 09:48:07 UTC (rev 3573) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-07 09:53:33 UTC (rev 3574) @@ -129,7 +129,9 @@ //compute the unsatisfiable object properties and their corresponding modules unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); logger.info("Found unsatisfiable object properties: " + unsatObjectProperties.size()); - entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); + if(computeParallel){ + entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); + } //start main process, either preferring root classes or not if(preferRoots){ @@ -240,9 +242,14 @@ System.gc(); } + entity2Explanations.clear(); + entity2ModuleMap.clear(); + if(!computeParallel){ unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + + entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); while(!unsatObjectProperties.isEmpty()){ //get frequency for each axiom Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
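In sequential mode the per-class explanation and module caches are cleared before the object-property pass, and fresh locality-based modules are extracted for the unsatisfiable properties. The sketch below shows the per-entity module extraction in isolation, assuming Pellet's modularity utilities and the OWL API on the classpath; the ontology file and the property IRI are illustrative.

import java.io.File;
import java.util.Collections;
import java.util.Set;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

import uk.ac.manchester.cs.owlapi.modularity.ModuleType;

import com.clarkparsia.modularity.ModularityUtils;

public class PropertyModuleDemo {
    public static void main(String[] args) throws Exception {
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        OWLOntology ont = man.loadOntologyFromOntologyDocument(new File("incoherent.owl")); // illustrative file
        // Hypothetical object property; in the extractor this is each unsatisfiable property.
        OWLObjectProperty prop = man.getOWLDataFactory()
                .getOWLObjectProperty(IRI.create("http://example.org/hasPart"));

        // Extract a TOP_OF_BOT module for the property, as extractModules(..) does per entity,
        // and wrap it in a fresh ontology so an explanation generator can work on it.
        Set<OWLAxiom> moduleAxioms = ModularityUtils.extractModule(
                ont, Collections.<OWLEntity>singleton(prop), ModuleType.TOP_OF_BOT);
        OWLOntology module = man.createOntology(moduleAxioms);
        System.out.println("Module for " + prop + ": " + module.getLogicalAxiomCount() + " logical axioms");
    }
}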
From: <lor...@us...> - 2012-02-08 11:49:26
Revision: 3576 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3576&view=rev Author: lorenz_b Date: 2012-02-08 11:49:16 +0000 (Wed, 08 Feb 2012) Log Message: ----------- Remove only axioms not contained in the original DBpedia ontology. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-07 21:43:20 UTC (rev 3575) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-08 11:49:16 UTC (rev 3576) @@ -4,6 +4,10 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Collections; @@ -20,6 +24,8 @@ import openlink.util.MD5; +import org.apache.commons.compress.compressors.CompressorException; +import org.apache.commons.compress.compressors.CompressorStreamFactory; import org.mindswap.pellet.RBox; import org.semanticweb.HermiT.Configuration; import org.semanticweb.HermiT.Reasoner; @@ -39,6 +45,7 @@ import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.RemoveAxiom; +import org.semanticweb.owlapi.owllink.builtin.requests.LoadOntologies; import org.semanticweb.owlapi.reasoner.IllegalConfigurationException; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration; @@ -83,12 +90,15 @@ //whether to debug classes and properties in parallel private boolean computeParallel = false; + private OWLOntology dbpediaOntology; + public JustificationBasedCoherentOntologyExtractor() { try { md5 = MessageDigest.getInstance("MD5"); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } + dbpediaOntology = loadDBpediaOntology(); } static {PelletExplanation.setup();} @@ -185,19 +195,14 @@ } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(!unsatClasses.isEmpty()){ - //get frequency for each axiom - Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); + if(computeParallel){ + cnt += unsatPropCnt; + } + + while(cnt >= 0){ + //we remove the most appropriate axiom from the ontology + removeAppropriateAxiom(); - //get a sorted list of entries with the highest axiom count first - List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); - for(Entry<OWLAxiom, Integer> entry : sortedEntries){ -// System.out.println(entry.getKey() + ":" + entry.getValue()); - } - //we remove the most frequent axiom from the ontology - OWLAxiom toRemove = sortedEntries.get(0).getKey(); - removeAxiom(toRemove); - //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); @@ -229,6 +234,9 @@ save("log/dbpedia_" + cnt + "cls" + unsatPropCnt + "prop.owl"); cnt = rootCnt + derivedCnt; unsatPropCnt = unsatObjectProperties.size(); + if(computeParallel){ + cnt += unsatPropCnt; + } } //recompute explanations if necessary 
@@ -251,15 +259,9 @@ entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); while(!unsatObjectProperties.isEmpty()){ - //get frequency for each axiom - Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); + //we remove the most appropriate axiom from the ontology + removeAppropriateAxiom(); - //get a sorted list of entries with the highest axiom count first - List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); - //we remove the most frequent axiom from the ontology - OWLAxiom toRemove = sortedEntries.get(0).getKey(); - removeAxiom(toRemove); - //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); @@ -324,18 +326,9 @@ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); while(!unsatClasses.isEmpty()){ - //get frequency for each axiom - Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); + //we remove the most appropriate axiom from the ontology + removeAppropriateAxiom(); - //get a sorted list of entries with the highest axiom count first - List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); - for(Entry<OWLAxiom, Integer> entry : sortedEntries){ -// System.out.println(entry.getKey() + ":" + entry.getValue()); - } - //we remove the most frequent axiom from the ontology - OWLAxiom toRemove = sortedEntries.get(0).getKey(); - removeAxiom(toRemove); - //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); @@ -375,13 +368,24 @@ return getOntologyWithAnnotations(incoherentOntology); } - private void removeAxiom(OWLAxiom axiom){ - logger.info("Removing axiom " + axiom + "."); - manager.removeAxiom(incoherentOntology, axiom); - manager.addAxiom(diffOntology, axiom); - manager.applyChange(new RemoveAxiom(incoherentOntology, axiom)); - removeFromExplanations(entity2Explanations, axiom); - removeFromModules(axiom); + private void removeAppropriateAxiom(){ + //get frequency for each axiom + Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); + //get a sorted list of entries with the highest axiom count first + List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); + //we remove the most frequent axiom from the ontology which is not contained in the original DBpedia ontology + for(Entry<OWLAxiom, Integer> e : sortedEntries){ + OWLAxiom axiom = e.getKey(); + if(!dbpediaOntology.containsAxiomIgnoreAnnotations(axiom)){ + logger.info("Removing axiom " + axiom + "."); + manager.removeAxiom(incoherentOntology, axiom); + manager.addAxiom(diffOntology, axiom); + manager.applyChange(new RemoveAxiom(incoherentOntology, axiom)); + removeFromExplanations(entity2Explanations, axiom); + removeFromModules(axiom); + return; + } + } } private void save(String fileName){ @@ -603,6 +607,28 @@ return module; } + private OWLOntology loadDBpediaOntology() { + long startTime = System.currentTimeMillis(); + logger.info("Loading DBpedia reference ontology..."); + OWLOntology ontology = null; + try { + URL dbpediaURL = new URL("http://downloads.dbpedia.org/3.7/dbpedia_3.7.owl.bz2"); + InputStream is = dbpediaURL.openStream(); + is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); + ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(is); + } catch (MalformedURLException e) { + e.printStackTrace(); + } catch (OWLOntologyCreationException e) { + 
e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } catch (CompressorException e) { + e.printStackTrace(); + } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + return ontology; + } + // private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ // Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); // for(OWLClass cls : classes){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
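The key change in this revision is that an axiom is only eligible for removal if it is not asserted in the hand-modelled DBpedia reference ontology, which loadDBpediaOntology() fetches bzip2-compressed from downloads.dbpedia.org. Here is a self-contained sketch of that filter; a small in-memory ontology stands in for the reference ontology, and the classes and axioms are hypothetical examples.

import java.util.Arrays;
import java.util.List;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class RemovalFilterDemo {
    public static void main(String[] args) throws Exception {
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = man.getOWLDataFactory();
        OWLClass a = df.getOWLClass(IRI.create("http://example.org/A")); // hypothetical classes
        OWLClass b = df.getOWLClass(IRI.create("http://example.org/B"));
        OWLClass c = df.getOWLClass(IRI.create("http://example.org/C"));

        // Stand-in for the DBpedia reference ontology: axioms asserted here must never be removed.
        OWLOntology reference = man.createOntology();
        OWLAxiom original = df.getOWLSubClassOfAxiom(a, b);
        man.addAxiom(reference, original);

        // Candidates as they would come out of the frequency-sorted justification axioms,
        // highest frequency first.
        OWLAxiom learned = df.getOWLSubClassOfAxiom(a, c);
        List<OWLAxiom> sortedCandidates = Arrays.asList(original, learned);

        // Pick the most frequent axiom that is NOT contained in the reference ontology.
        for (OWLAxiom ax : sortedCandidates) {
            if (!reference.containsAxiomIgnoreAnnotations(ax)) {
                System.out.println("Removing " + ax);
                break;
            }
        }
    }
}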
From: <lor...@us...> - 2012-02-08 13:23:18
Revision: 3577 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3577&view=rev Author: lorenz_b Date: 2012-02-08 13:23:08 +0000 (Wed, 08 Feb 2012) Log Message: ----------- Reading ontologies from BZIP2. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-08 11:49:16 UTC (rev 3576) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-08 13:23:08 UTC (rev 3577) @@ -1,7 +1,9 @@ package org.dllearner.utilities; +import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; +import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; @@ -682,8 +684,11 @@ boolean computeParallel = Boolean.valueOf(args[3]); System.out.println("Loading ontology..."); - File file = new File(filename); - OWLOntology schema = man.loadOntologyFromOntologyDocument(file); + InputStream is = new BufferedInputStream(new FileInputStream(filename)); + if(args[0].endsWith("bz2")){ + is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); + } + OWLOntology schema = man.loadOntologyFromOntologyDocument(is); man.removeAxioms(schema, schema.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY)); // OWLOntology cleaned = man.createOntology(IRI.create("http://dbpedia_cleaned.owl")); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
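A stand-alone sketch of the new input handling: files ending in bz2 are decompressed on the fly with Apache Commons Compress before being handed to the OWL API. The default file name is illustrative.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;

public class Bzip2LoadDemo {
    public static void main(String[] args) throws Exception {
        String filename = args.length > 0 ? args[0] : "dbpedia_3.7.owl.bz2"; // illustrative default
        InputStream is = new BufferedInputStream(new FileInputStream(filename));
        // Transparently decompress if the file is bzip2-compressed, as in the patched main().
        if (filename.endsWith("bz2")) {
            is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is);
        }
        OWLOntology ont = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(is);
        System.out.println(ont.getLogicalAxiomCount() + " logical axioms loaded.");
    }
}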
From: <lor...@us...> - 2012-02-08 13:48:13
Revision: 3578 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3578&view=rev Author: lorenz_b Date: 2012-02-08 13:48:02 +0000 (Wed, 08 Feb 2012) Log Message: ----------- Fixed bug. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-08 13:23:08 UTC (rev 3577) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-08 13:48:02 UTC (rev 3578) @@ -115,7 +115,9 @@ this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); - File diffFile = new File(new File(ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toURI()).getParent() + "/" + DIFF_ONTOLOGY_NAME); + new File("log").mkdir(); + + File diffFile = new File("log/" + DIFF_ONTOLOGY_NAME); try { if(diffFile.exists()){ diffOntology = manager.loadOntologyFromOntologyDocument(diffFile); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-02-22 21:03:15
Revision: 3583 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3583&view=rev Author: lorenz_b Date: 2012-02-22 21:03:09 +0000 (Wed, 22 Feb 2012) Log Message: ----------- Fixed problem when no root class was detected. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-22 17:56:55 UTC (rev 3582) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-22 21:03:09 UTC (rev 3583) @@ -172,6 +172,7 @@ StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner); Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses(); Set<OWLClass> derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); int rootCnt = unsatClasses.size(); int derivedCnt = derivedUnsatClasses.size(); @@ -180,6 +181,10 @@ int unsatPropCnt = unsatObjectProperties.size(); logger.info("Detected " + cnt + " unsatisfiable classes, " + rootCnt + " of them as root."); + if(unsatClasses.isEmpty()){ + unsatClasses = derivedUnsatClasses; + } + //if the ontology is not incoherent we return it here if(unsatClasses.isEmpty()){ return incoherentOntology; @@ -226,6 +231,10 @@ logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); + if(unsatClasses.isEmpty()){ + unsatClasses = derivedUnsatClasses; + } + //recompute unsatisfiable object properties if(computeParallel){ unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
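For readability, the fallback added above in one piece. This is a sketch, not a runnable program: it assumes an already initialised reasoner over the incoherent ontology and the StructureBasedRootClassFinder utility used by the extractor.

// Choose the working set of unsatisfiable classes. Root classes are preferred, but if the
// structural analysis finds none, fall back to the derived unsatisfiable classes so that
// the repair loop does not terminate while the ontology is still incoherent.
StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner);
Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses();
Set<OWLClass> derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses();
if (unsatClasses.isEmpty()) {
    unsatClasses = derivedUnsatClasses;
}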
From: <lor...@us...> - 2012-02-24 10:37:01
Revision: 3586 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3586&view=rev Author: lorenz_b Date: 2012-02-24 10:36:50 +0000 (Fri, 24 Feb 2012) Log Message: ----------- Small changes for filename. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-24 08:59:06 UTC (rev 3585) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-24 10:36:50 UTC (rev 3586) @@ -94,6 +94,8 @@ private OWLOntology dbpediaOntology; + private String fileName; + public JustificationBasedCoherentOntologyExtractor() { try { md5 = MessageDigest.getInstance("MD5"); @@ -115,6 +117,14 @@ this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); + IRI iri = ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology); + fileName = "dbpedia"; + if(iri != null){ + fileName = iri.toString().substring( iri.toString().lastIndexOf('/')+1, iri.toString().length() ); + } else { + + } + new File("log").mkdir(); File diffFile = new File("log/" + DIFF_ONTOLOGY_NAME); @@ -176,6 +186,12 @@ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); int rootCnt = unsatClasses.size(); int derivedCnt = derivedUnsatClasses.size(); + + //if no roots are found we use all unsat classes + if(rootCnt == 0){ + unsatClasses = derivedUnsatClasses; + } + // Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); int cnt = rootCnt + derivedCnt; int unsatPropCnt = unsatObjectProperties.size(); @@ -229,6 +245,11 @@ derivedCnt = derivedUnsatClasses.size(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + //if no roots are found we use all unsat classes + if(rootCnt == 0){ + unsatClasses = derivedUnsatClasses; + } + logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); if(unsatClasses.isEmpty()){ @@ -244,7 +265,7 @@ //save if(cnt - (rootCnt+derivedCnt) >= 1 || (unsatPropCnt - unsatObjectProperties.size()) >= 1){ cnt = rootCnt + derivedCnt; - save("log/dbpedia_" + cnt + "cls" + unsatPropCnt + "prop.owl"); + save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); cnt = rootCnt + derivedCnt; unsatPropCnt = unsatObjectProperties.size(); if(computeParallel){ @@ -287,7 +308,7 @@ //save if((unsatPropCnt - unsatObjectProperties.size()) >= 1){ - save("log/dbpedia_" + cnt + "cls" + unsatPropCnt + "prop.owl"); + save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); unsatPropCnt = unsatObjectProperties.size(); } @@ -338,7 +359,7 @@ entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(!unsatClasses.isEmpty()){ + while(!unsatClasses.isEmpty() && !unsatObjectProperties.isEmpty()){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -357,7 +378,7 @@ //save if(cnt - unsatClasses.size() >= 10){ cnt = unsatClasses.size(); - save("log/dbpedia_" + cnt + ".owl"); + save("log/" + fileName + "_" + cnt + "cls" + unsatObjectProperties.size() + "prop.owl"); } //recompute 
explanations if necessary This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-02-26 10:02:35
Revision: 3589 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3589&view=rev Author: lorenz_b Date: 2012-02-26 10:02:29 +0000 (Sun, 26 Feb 2012) Log Message: ----------- Small changes to save ontologies with given filename. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-24 14:15:56 UTC (rev 3588) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-26 10:02:29 UTC (rev 3589) @@ -94,7 +94,8 @@ private OWLOntology dbpediaOntology; - private String fileName; + private String fileName = "dbpedia"; + private String diffFileName = "diff.owl"; public JustificationBasedCoherentOntologyExtractor() { try { @@ -117,14 +118,6 @@ this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); - IRI iri = ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology); - fileName = "dbpedia"; - if(iri != null){ - fileName = iri.toString().substring( iri.toString().lastIndexOf('/')+1, iri.toString().length() ); - } else { - - } - new File("log").mkdir(); File diffFile = new File("log/" + DIFF_ONTOLOGY_NAME); @@ -165,6 +158,11 @@ } } + public void setFileName(String fileName) { + this.fileName = fileName; + diffFileName = "diff_" + fileName; + } + private OWLOntology computeCoherentOntologyRootBased(OWLOntology ontology) { // startTime = System.currentTimeMillis(); @@ -426,7 +424,7 @@ OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); try { toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); - toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/diff.owl"))); + toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { @@ -743,6 +741,11 @@ JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); extractor.setNumberOfJustifications(numberOfJustifications); extractor.setComputeParallel(computeParallel); + if(filename.indexOf('/') >= 0){ + filename = filename.substring( filename.lastIndexOf('/')+1, filename.length() ); + } + + extractor.setFileName(filename); OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
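Usage sketch for the new setFileName(..) hook, mirroring the updated main(): the base name of the input file now determines the names of the snapshot files (log/&lt;name&gt;_&lt;k&gt;cls&lt;m&gt;prop.owl) and of the diff ontology (log/diff_&lt;name&gt;). The path used here is illustrative.

import org.dllearner.utilities.JustificationBasedCoherentOntologyExtractor;

public class FileNameDemo {
    public static void main(String[] args) {
        String filename = "/data/ontologies/dbpedia_enriched.owl"; // illustrative path
        // Strip the directory part, exactly as the updated main() does.
        if (filename.indexOf('/') >= 0) {
            filename = filename.substring(filename.lastIndexOf('/') + 1);
        }
        JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor();
        extractor.setFileName(filename);
        System.out.println("Snapshot base name: " + filename);
    }
}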
From: <lor...@us...> - 2012-02-28 15:56:27
Revision: 3597 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3597&view=rev Author: lorenz_b Date: 2012-02-28 15:56:18 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Many modifications to work for eval. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 15:46:01 UTC (rev 3596) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 15:56:18 UTC (rev 3597) @@ -14,7 +14,9 @@ import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -24,25 +26,29 @@ import java.util.TreeSet; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.logging.Level; -import java.util.logging.Logger; import openlink.util.MD5; import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorStreamFactory; -import org.mindswap.pellet.RBox; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.FileAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.SimpleLayout; import org.semanticweb.HermiT.Configuration; import org.semanticweb.HermiT.Reasoner; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLLiteral; import org.semanticweb.owlapi.model.OWLLogicalAxiom; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; @@ -56,6 +62,7 @@ import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; import uk.ac.manchester.cs.owlapi.modularity.ModuleType; import com.clarkparsia.modularity.IncrementalClassifier; @@ -63,7 +70,6 @@ import com.clarkparsia.owlapi.explanation.BlackBoxExplanation; import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; -import com.clarkparsia.owlapiv3.OntologyUtils; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -78,7 +84,7 @@ private OWLOntology incoherentOntology; private OWLOntology ontology; - private OWLDataFactory factory; + private OWLDataFactory factory = new OWLDataFactoryImpl();; //we store the removed axioms in it private OWLOntology diffOntology; @@ -86,6 +92,8 @@ private Map<OWLEntity, 
OWLOntology> entity2ModuleMap = new HashMap<OWLEntity, OWLOntology>(); private Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); private Map<OWLEntity, PelletExplanation> entity2ExpGen = new HashMap<OWLEntity, PelletExplanation>(); + private Set<OWLEntity> entitiesWithLessExplanations = new HashSet<OWLEntity>(); + private OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); MessageDigest md5; @@ -96,6 +104,8 @@ //whether to debug classes and properties in parallel private boolean computeParallel = false; + private OWLAnnotationProperty confidenceProperty; + private OWLOntology dbpediaOntology; private String fileName = "dbpedia"; @@ -119,6 +129,8 @@ @Override public OWLOntology getCoherentOntology(OWLOntology ontology, boolean preferRoots){ + ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); + this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); @@ -218,7 +230,7 @@ startTime = System.currentTimeMillis(); computeExplanations(unsatClasses); if(computeParallel){ - entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + computeExplanations(unsatObjectProperties); } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -235,12 +247,12 @@ startTime = System.currentTimeMillis(); reasoner.classify(); // hermitReasoner.classifyClasses(); -// unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + //Set<OWLClass> unsatClasses2 = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Computing root/derived unsatisfiable classes..."); startTime = System.currentTimeMillis(); - rootFinder = new StructureBasedRootClassFinder(reasoner); + rootFinder = new StructureBasedRootClassFinder(reasoner, this); unsatClasses = rootFinder.getRootUnsatisfiableClasses(); derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); rootCnt = unsatClasses.size(); @@ -253,19 +265,19 @@ } logger.info("Remaining unsatisfiable classes: " + (rootCnt + derivedCnt) + "(" + rootCnt + " roots)."); - + if(unsatClasses.isEmpty()){ unsatClasses = derivedUnsatClasses; } //recompute unsatisfiable object properties - if(computeParallel){ + // if(computeParallel){ unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); - } + // } //save - if(cnt - (rootCnt+derivedCnt) >= 1 || (unsatPropCnt - unsatObjectProperties.size()) >= 1){ + if(cnt - (rootCnt+derivedCnt) >= 5 || (unsatPropCnt - unsatObjectProperties.size()) >= 5){ cnt = rootCnt + derivedCnt; save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); cnt = rootCnt + derivedCnt; @@ -362,11 +374,11 @@ //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - entity2Explanations.putAll(getInitialExplanations(unsatClasses)); - entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + computeExplanations(unsatClasses); +// entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(!unsatClasses.isEmpty() && !unsatObjectProperties.isEmpty()){ + while(!unsatClasses.isEmpty()){// && !unsatObjectProperties.isEmpty()){ //we remove the most appropriate 
axiom from the ontology removeAppropriateAxiom(); @@ -392,7 +404,7 @@ logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); refillExplanations(unsatClasses, entity2Explanations); - refillExplanations(unsatObjectProperties, entity2Explanations); + //refillExplanations(unsatObjectProperties, entity2Explanations); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); @@ -478,10 +490,11 @@ } private void removeAppropriateAxiom(){ + logger.info("Searching for appropriate axiom to remove..."); //get frequency for each axiom Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); //get a sorted list of entries with the highest axiom count first - List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); + List<Entry<OWLAxiom, Integer>> sortedEntries = sort(axiom2CountMap); //we remove the most frequent axiom from the ontology which is not contained in the original DBpedia ontology for(Entry<OWLAxiom, Integer> e : sortedEntries){ OWLAxiom axiom = e.getKey(); @@ -500,7 +513,7 @@ private void save(String fileName){ OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); try { - toSave.getOWLOntologyManager().saveOntology(incoherentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); + toSave.getOWLOntologyManager().saveOntology(toSave, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); @@ -549,6 +562,7 @@ Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatClass); if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ Set<Set<OWLAxiom>> newExplanations = computeExplanations(unsatClass, numberOfJustifications); + logger.info(unsatClass + ": " + newExplanations.size()); entity2Explanations.put(unsatClass, newExplanations); } } @@ -576,17 +590,20 @@ private void computeExplanations(Set<? extends OWLEntity> unsatEntities){ ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); - List<Future<Void>> list = new ArrayList<Future<Void>>(); for(final OWLEntity unsatEntity : unsatEntities){ Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatEntity); - if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ + if(precomputedExplanations == null || (!entitiesWithLessExplanations.contains(unsatEntity) && precomputedExplanations.size() < numberOfJustifications)){ executor.execute(new Runnable(){ @Override public void run() { Set<Set<OWLAxiom>> explanations = computeExplanations(unsatEntity); + logger.info("Computed "+ explanations.size() + " explanations for " + unsatEntity); entity2Explanations.put(unsatEntity, explanations); + if(explanations.size() < numberOfJustifications){ + entitiesWithLessExplanations.add(unsatEntity); + } } }); @@ -601,22 +618,10 @@ } - private Map<OWLEntity, Set<Set<OWLAxiom>>> getInitialExplanations(Set<? 
extends OWLEntity> unsatEntities){ - Map<OWLEntity, Set<Set<OWLAxiom>>> cls2Explanations = new HashMap<OWLEntity, Set<Set<OWLAxiom>>>(); - - Set<Set<OWLAxiom>> explanations; - for(OWLEntity unsatEntity : unsatEntities){ - explanations = computeExplanations(unsatEntity); - cls2Explanations.put(unsatEntity, explanations); - } - - return cls2Explanations; - } - private OWLOntology getOntologyWithoutAnnotations(OWLOntology ontology){ try { - OWLOntologyManager man = ontology.getOWLOntologyManager(); - OWLOntology ontologyWithoutAnnotations = ontology.getOWLOntologyManager().createOntology(); + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLOntology ontologyWithoutAnnotations = man.createOntology(); for(OWLAxiom ax : ontology.getLogicalAxioms()){ man.addAxiom(ontologyWithoutAnnotations, ax.getAxiomWithoutAnnotations()); } @@ -640,7 +645,6 @@ } private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity){ - logger.info(unsatEntity); return computeExplanations(unsatEntity, numberOfJustifications); } @@ -656,10 +660,10 @@ private PelletExplanation getExplanationGenerator(OWLEntity entity){ PelletExplanation expGen = entity2ExpGen.get(entity); - if(expGen == null){ +// if(expGen == null){ expGen = new PelletExplanation(PelletReasonerFactory.getInstance().createNonBufferingReasoner(getModule(entity))); - entity2ExpGen.put(entity, expGen); - } +// entity2ExpGen.put(entity, expGen); +// } return expGen; } @@ -668,42 +672,27 @@ HSTExplanationGenerator expGen = new HSTExplanationGenerator(singleExpGen); return expGen.getExplanations(unsatClass, limit); } + + private double getConfidence(OWLAxiom axiom){ + Set<OWLAxiom> axiomsWithAnnotations = ontology.getAxiomsIgnoreAnnotations(axiom); + if(axiomsWithAnnotations.isEmpty()){ + logger.info("Axiom with annotations not found: " + axiom); + logger.info("Ontology contains axiom: " + incoherentOntology.containsAxiomIgnoreAnnotations(axiom)); + logger.info("Original loaded ontology contains axiom: " + ontology.containsAxiomIgnoreAnnotations(axiom)); + System.out.println(ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); + } + OWLAxiom axiomWithAnnotations = axiomsWithAnnotations.iterator().next(); + Set<OWLAnnotation> annotations = axiomWithAnnotations.getAnnotations(confidenceProperty); + if(!annotations.isEmpty()){ + OWLAnnotation anno = annotations.iterator().next(); + OWLLiteral literal = (OWLLiteral) anno.getValue(); + return literal.parseDouble(); + } + return 2; + + } -// private Set<Set<OWLAxiom>> computeExplanationsBlackbox(OWLClass unsatClass, int limit){ -// BlackBoxExplanation b = new BlackBoxExplanation(incoherentOntology, reasonerFactory, hermitReasoner) -// MultipleExplanationGenerator expGen = new HSTExplanationGenerator(b); -// PelletExplanation expGen = new PelletExplanation(getModule(unsatClass)); -// return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); -// } -// private OWLOntology getModule(OWLClass cls){ -// OWLOntology module = cls2ModuleMap.get(cls); -// new File("log").mkdir(); -// if(module == null){ -// md5.reset(); -// md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + cls.toStringID()).getBytes()); -// String hash = MD5.asHex(md5.digest()); -// String filename = "log/" + hash + ".owl"; -// File file = new File(filename); -// if(file.exists()){ -// module = loadModule(file); -// } else { -// module = OntologyUtils.getOntologyFromAxioms( -// 
ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); -// try { -// manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); -// } catch (OWLOntologyStorageException e) { -// e.printStackTrace(); -// } catch (FileNotFoundException e) { -// e.printStackTrace(); -// } -// } -// -// cls2ModuleMap.put(cls, module); -// } -// return module; -// } - public OWLOntology getModule(OWLEntity entity){ OWLOntology module = entity2ModuleMap.get(entity); new File("log").mkdir(); @@ -713,21 +702,29 @@ String hash = MD5.asHex(md5.digest()); String filename = "log/" + hash + ".owl"; File file = new File(filename); - if(file.exists()){ + boolean load = false; + if(load){//file.exists()){ module = loadModule(file); } else { + try { + module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } + /* module = OntologyUtils.getOntologyFromAxioms( ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); + try { manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); - } + }*/ } - entity2ModuleMap.put(entity, module); + //entity2ModuleMap.put(entity, module); } return module; } @@ -765,24 +762,6 @@ return ontology; } -// private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ -// Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); -// for(OWLClass cls : classes){ -// OWLOntology module = getModule(cls); -// cls2ModuleMap.put(cls, module); -// } -// return cls2ModuleMap; -// } -// -// private Map<OWLObjectProperty, OWLOntology> extractModules(Set<OWLObjectProperty> objectProperties){ -// Map<OWLObjectProperty, OWLOntology> prop2ModuleMap = new HashMap<OWLObjectProperty, OWLOntology>(); -// for(OWLObjectProperty prop : objectProperties){ -// OWLOntology module = getModule(prop); -// prop2ModuleMap.put(prop, module); -// } -// return prop2ModuleMap; -// } - private Map<OWLEntity, OWLOntology> extractModules(Set<? 
extends OWLEntity> entities){ logger.info("Computing modules..."); long startTime = System.currentTimeMillis(); @@ -803,19 +782,54 @@ public void setComputeParallel(boolean computeParallel) { this.computeParallel = computeParallel; } + + public void setConfidencePropertyIRI(String iri){ + this.confidenceProperty = factory.getOWLAnnotationProperty(IRI.create(iri)); + } + private List<Entry<OWLAxiom, Integer>> sort(Map<OWLAxiom, Integer> map){ + List<Entry<OWLAxiom, Integer>> entries = new ArrayList<Entry<OWLAxiom, Integer>>(map.entrySet()); + Collections.sort(entries, new Comparator<Entry<OWLAxiom, Integer>>() { + + @Override + public int compare(Entry<OWLAxiom, Integer> o1, Entry<OWLAxiom, Integer> o2) { + int cmp = o2.getValue().compareTo(o1.getValue()); + //use as tie breaker the confidence value + if(cmp == 0){ + double conf1 = getConfidence(o1.getKey()); + double conf2 = getConfidence(o2.getKey()); + double diff = conf1-conf2; + if(diff > 0){ + return 1; + } else if(diff < 0){ + return -1; + } else { + return 0; + } +// return Double.compare(conf2, conf1); + } + return cmp; + } + }); + return entries; + } + public static void main(String[] args) throws Exception{ - Logger.getLogger(RBox.class.getName()).setLevel(Level.OFF); + Logger.getRootLogger().setLevel(Level.INFO); + Logger.getRootLogger().removeAllAppenders(); + Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout())); + Logger.getRootLogger().addAppender(new FileAppender(new SimpleLayout(), "log/out.log")); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - if(args.length != 4){ - System.out.println("USAGE: JustificationBasedCoherentOntologyExtractor <incoherent.owl> <numberOfJustifcations> <preferRootClasses(true|false)> <computeParallel(true|false)>"); + if(args.length != 5){ + System.out.println("USAGE: JustificationBasedCoherentOntologyExtractor <incoherent.owl> <confidencePropertyIRI> <numberOfJustifcations> <preferRootClasses(true|false)> <computeParallel(true|false)>"); System.exit(0); } String filename = args[0]; - int numberOfJustifications = Integer.parseInt(args[1]); - boolean preferRoots = Boolean.valueOf(args[2]); - boolean computeParallel = Boolean.valueOf(args[3]); + String confidenceIRI = args[1]; + int numberOfJustifications = Integer.parseInt(args[2]); + boolean preferRoots = Boolean.valueOf(args[3]); + boolean computeParallel = Boolean.valueOf(args[4]); System.out.println("Loading ontology..."); InputStream is = new BufferedInputStream(new FileInputStream(filename)); @@ -823,34 +837,22 @@ is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); } OWLOntology schema = man.loadOntologyFromOntologyDocument(is); - man.removeAxioms(schema, schema.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY)); - -// OWLOntology cleaned = man.createOntology(IRI.create("http://dbpedia_cleaned.owl")); -// man.addAxioms(cleaned, schema.getLogicalAxioms()); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.REFLEXIVE_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.IRREFLEXIVE_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.SYMMETRIC_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.ASYMMETRIC_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.FUNCTIONAL_OBJECT_PROPERTY)); -// man.removeAxioms(cleaned, cleaned.getAxioms(AxiomType.INVERSE_FUNCTIONAL_OBJECT_PROPERTY)); -// 
man.saveOntology(cleaned, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(file.getParent() + "/cleaned.owl"))); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("log/dbpedia_95.owl")); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); -// System.out.println(schema.getLogicalAxiomCount()); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("log/dbpedia_coherent.owl")); -// System.out.println(schema.getLogicalAxiomCount()); + Set<OWLTransitiveObjectPropertyAxiom> removedAxioms = schema.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); + man.removeAxioms(schema, removedAxioms); System.out.println("...done."); JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); extractor.setNumberOfJustifications(numberOfJustifications); extractor.setComputeParallel(computeParallel); + extractor.setConfidencePropertyIRI(confidenceIRI); if(filename.indexOf('/') >= 0){ filename = filename.substring( filename.lastIndexOf('/')+1, filename.length() ); } extractor.setFileName(filename); OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); + man.addAxioms(coherentOntology, removedAxioms); + System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
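The central addition of this revision is the sort(..) helper that ranks removal candidates by how often they occur in the computed justifications and, on ties, by the confidence value read from the configured annotation property (2 is returned when no annotation is found). A self-contained illustration of that ranking, with plain strings standing in for OWLAxiom and hypothetical counts and confidences:

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

public class AxiomRankingDemo {
    public static void main(String[] args) {
        // Hypothetical frequencies: how often each axiom occurs in the justifications
        // computed for the unsatisfiable entities.
        Map<String, Integer> frequency = new HashMap<String, Integer>();
        frequency.put("SubClassOf(Award, Person)", 7);
        frequency.put("SubClassOf(Award, Work)", 7);
        frequency.put("DisjointClasses(Person, Work)", 3);

        // Hypothetical confidence values, as read from the confidence annotation property.
        final Map<String, Double> confidence = new HashMap<String, Double>();
        confidence.put("SubClassOf(Award, Person)", 0.61);
        confidence.put("SubClassOf(Award, Work)", 0.87);
        confidence.put("DisjointClasses(Person, Work)", 2.0);

        // Rank as in the new sort(..) helper: highest frequency first; ties are broken by
        // confidence in ascending order, so the less confident axiom is removed first.
        List<Entry<String, Integer>> entries = new ArrayList<Entry<String, Integer>>(frequency.entrySet());
        Collections.sort(entries, new Comparator<Entry<String, Integer>>() {
            @Override
            public int compare(Entry<String, Integer> o1, Entry<String, Integer> o2) {
                int cmp = o2.getValue().compareTo(o1.getValue());
                if (cmp == 0) {
                    return Double.compare(confidence.get(o1.getKey()), confidence.get(o2.getKey()));
                }
                return cmp;
            }
        });
        System.out.println("Candidate for removal: " + entries.get(0).getKey());
    }
}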
From: <lor...@us...> - 2012-02-28 21:45:06
Revision: 3598 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3598&view=rev Author: lorenz_b Date: 2012-02-28 21:44:59 +0000 (Tue, 28 Feb 2012) Log Message: ----------- Further debugging for eval. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 15:56:18 UTC (rev 3597) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 21:44:59 UTC (rev 3598) @@ -238,7 +238,7 @@ cnt += unsatPropCnt; } - while(cnt >= 0){ + while(cnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -301,6 +301,8 @@ entity2Explanations.clear(); entity2ModuleMap.clear(); + save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); + if(!computeParallel){ unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); @@ -341,15 +343,9 @@ } } - try { - incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } + save("log/" + fileName + "_coherent.owl"); - return getOntologyWithAnnotations(incoherentOntology); + return ontology; } private OWLOntology computeCoherentOntology(OWLOntology ontology) { @@ -501,6 +497,10 @@ if(!dbpediaOntology.containsAxiomIgnoreAnnotations(axiom)){ logger.info("Removing axiom " + axiom + "."); manager.removeAxiom(incoherentOntology, axiom); + //remove the axiom also from the loaded ontology + OWLAxiom originalAnnotatedAxiom = ontology.getAxiomsIgnoreAnnotations(axiom).iterator().next(); + ontology.getOWLOntologyManager().removeAxiom(ontology, originalAnnotatedAxiom); + manager.addAxiom(diffOntology, axiom); manager.applyChange(new RemoveAxiom(incoherentOntology, axiom)); removeFromExplanations(entity2Explanations, axiom); @@ -511,10 +511,9 @@ } private void save(String fileName){ - OWLOntology toSave = getOntologyWithAnnotations(incoherentOntology); try { - toSave.getOWLOntologyManager().saveOntology(toSave, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); - toSave.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); + ontology.getOWLOntologyManager().saveOntology(ontology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); + diffOntology.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); } catch (OWLOntologyStorageException e) { e.printStackTrace(); } catch (FileNotFoundException e) { @@ -634,13 +633,14 @@ } private OWLOntology getOntologyWithAnnotations(OWLOntology ontologyWithOutAnnotations){ + logger.info("BEFORE: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); 
OWLOntologyManager man = ontology.getOWLOntologyManager(); for (Iterator<OWLLogicalAxiom> iterator = ontology.getLogicalAxioms().iterator(); iterator.hasNext();) { OWLLogicalAxiom axiom = iterator.next(); if(!ontologyWithOutAnnotations.containsAxiomIgnoreAnnotations(axiom)){ man.removeAxiom(ontology, axiom); } - } + }logger.info("AFTER: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); return ontology; } @@ -677,9 +677,7 @@ Set<OWLAxiom> axiomsWithAnnotations = ontology.getAxiomsIgnoreAnnotations(axiom); if(axiomsWithAnnotations.isEmpty()){ logger.info("Axiom with annotations not found: " + axiom); - logger.info("Ontology contains axiom: " + incoherentOntology.containsAxiomIgnoreAnnotations(axiom)); - logger.info("Original loaded ontology contains axiom: " + ontology.containsAxiomIgnoreAnnotations(axiom)); - System.out.println(ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); + return 2; } OWLAxiom axiomWithAnnotations = axiomsWithAnnotations.iterator().next(); Set<OWLAnnotation> annotations = axiomWithAnnotations.getAnnotations(confidenceProperty); @@ -694,38 +692,46 @@ public OWLOntology getModule(OWLEntity entity){ - OWLOntology module = entity2ModuleMap.get(entity); - new File("log").mkdir(); - if(module == null){ - md5.reset(); - md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + entity.toStringID()).getBytes()); - String hash = MD5.asHex(md5.digest()); - String filename = "log/" + hash + ".owl"; - File file = new File(filename); - boolean load = false; - if(load){//file.exists()){ - module = loadModule(file); - } else { - try { - module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } - /* - module = OntologyUtils.getOntologyFromAxioms( - ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); - - try { - manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - }*/ - } - - //entity2ModuleMap.put(entity, module); + OWLOntology module = null; + try { + module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); } + +// OWLOntology module = entity2ModuleMap.get(entity); +// new File("log").mkdir(); +// if(module == null){ +// md5.reset(); +// md5.update((ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toString() + entity.toStringID()).getBytes()); +// String hash = MD5.asHex(md5.digest()); +// String filename = "log/" + hash + ".owl"; +// File file = new File(filename); +// boolean load = false; +// if(load){//file.exists()){ +// module = loadModule(file); +// } else { +// try { +// module = OWLManager.createOWLOntologyManager().createOntology(ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); +// } catch (OWLOntologyCreationException e) { +// e.printStackTrace(); +// } +// /* +// module = OntologyUtils.getOntologyFromAxioms( +// 
ModularityUtils.extractModule(incoherentOntology, Collections.singleton(entity), ModuleType.TOP_OF_BOT)); +// +// try { +// manager.saveOntology(module, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(filename))); +// } catch (OWLOntologyStorageException e) { +// e.printStackTrace(); +// } catch (FileNotFoundException e) { +// e.printStackTrace(); +// }*/ +// } +// +// //entity2ModuleMap.put(entity, module); +// } return module; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
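This revision keeps the originally loaded, annotation-carrying ontology in sync with the working copy: when a stripped axiom is removed, its annotated counterpart is looked up via getAxiomsIgnoreAnnotations and removed as well, so that save(..) can write the annotated ontology directly. A self-contained sketch of that bookkeeping, using a hypothetical confidence property and example axiom:

import java.util.Collections;
import java.util.Set;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class AnnotatedRemovalDemo {
    public static void main(String[] args) throws Exception {
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = man.getOWLDataFactory();
        OWLOntology ontology = man.createOntology();

        // Hypothetical confidence-annotated axiom, as produced by an enrichment step.
        OWLAnnotationProperty confProp = df.getOWLAnnotationProperty(IRI.create("http://example.org/confidence"));
        OWLAnnotation conf = df.getOWLAnnotation(confProp, df.getOWLLiteral(0.73));
        OWLAxiom annotatedAxiom = df.getOWLSubClassOfAxiom(
                df.getOWLClass(IRI.create("http://example.org/A")),
                df.getOWLClass(IRI.create("http://example.org/B")),
                Collections.singleton(conf));
        man.addAxiom(ontology, annotatedAxiom);

        // The repair loop works on axioms with annotations stripped; to keep the loaded
        // ontology in sync, the annotated counterpart has to be looked up and removed too.
        OWLAxiom stripped = annotatedAxiom.getAxiomWithoutAnnotations();
        Set<OWLAxiom> matches = ontology.getAxiomsIgnoreAnnotations(stripped);
        if (!matches.isEmpty()) {
            man.removeAxiom(ontology, matches.iterator().next());
        }
        System.out.println("Remaining logical axioms: " + ontology.getLogicalAxiomCount());
    }
}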
From: <lor...@us...> - 2012-02-29 08:00:11
Revision: 3599 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3599&view=rev Author: lorenz_b Date: 2012-02-29 08:00:04 +0000 (Wed, 29 Feb 2012) Log Message: ----------- Added optimizations. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-28 21:44:59 UTC (rev 3598) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:00:04 UTC (rev 3599) @@ -27,8 +27,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import openlink.util.MD5; - import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorStreamFactory; import org.apache.log4j.ConsoleAppender; @@ -58,6 +56,7 @@ import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.RemoveAxiom; import org.semanticweb.owlapi.reasoner.IllegalConfigurationException; +import org.semanticweb.owlapi.reasoner.InferenceType; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; @@ -70,6 +69,7 @@ import com.clarkparsia.owlapi.explanation.BlackBoxExplanation; import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; +import com.clarkparsia.pellet.owlapiv3.PelletReasoner; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -78,7 +78,7 @@ private static final String DIFF_ONTOLOGY_NAME = "diff.owl"; private int numberOfJustifications = 10; -// private PelletReasoner reasoner; + private PelletReasoner propReasoner; private IncrementalClassifier reasoner; private Reasoner hermitReasoner; @@ -129,7 +129,7 @@ @Override public OWLOntology getCoherentOntology(OWLOntology ontology, boolean preferRoots){ - ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); +// ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); @@ -155,12 +155,13 @@ factory = manager.getOWLDataFactory(); long startTime = System.currentTimeMillis(); - reasoner = new IncrementalClassifier(incoherentOntology); - reasoner.classify(); + propReasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(incoherentOntology); + reasoner = new IncrementalClassifier(propReasoner); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); //compute the unsatisfiable object properties and their corresponding modules - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Found unsatisfiable object properties: " + unsatObjectProperties.size()); if(computeParallel){ entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); @@ -272,7 +273,7 @@ //recompute 
unsatisfiable object properties // if(computeParallel){ - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); // } @@ -286,6 +287,11 @@ cnt += unsatPropCnt; } } + if(unsatClasses.isEmpty() && (!computeParallel || (computeParallel && unsatObjectProperties.isEmpty()))){ + cnt = 0; + unsatPropCnt = unsatObjectProperties.size(); + break; + } //recompute explanations if necessary logger.info("Recomputing explanations..."); @@ -304,10 +310,10 @@ save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); if(!computeParallel){ - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); - entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); +// entity2ModuleMap.putAll(extractModules(unsatObjectProperties)); logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); @@ -317,18 +323,12 @@ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); - //recompute the unsatisfiable classes - logger.info("Reclassifying..."); - startTime = System.currentTimeMillis(); - reasoner.classify(); - logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - //recompute unsatisfiable object properties - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if((unsatPropCnt - unsatObjectProperties.size()) >= 1){ + if((unsatPropCnt - unsatObjectProperties.size()) >= 5){ save("log/" + fileName + "_" + cnt + "cls" + unsatPropCnt + "prop.owl"); unsatPropCnt = unsatObjectProperties.size(); } @@ -387,7 +387,7 @@ logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); //recompute unsatisfiable object properties - unsatObjectProperties = getUnsatisfiableObjectProperties(reasoner); + unsatObjectProperties = getUnsatisfiableObjectProperties(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save @@ -521,7 +521,7 @@ } } - private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(IncrementalClassifier reasoner){ + private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(){ logger.info("Computing unsatisfiable object properties..."); long startTime = System.currentTimeMillis(); SortedSet<OWLObjectProperty> properties = new TreeSet<OWLObjectProperty>(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
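A minimal sketch of the reasoner setup as changed in this revision: a non-buffering Pellet reasoner is created over the ontology and wrapped in Pellet's IncrementalClassifier, and only the class hierarchy is precomputed. The input file name is illustrative.

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.reasoner.InferenceType;

import com.clarkparsia.modularity.IncrementalClassifier;
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;

public class ReasonerSetupDemo {
    public static void main(String[] args) throws Exception {
        OWLOntology ont = OWLManager.createOWLOntologyManager()
                .loadOntologyFromOntologyDocument(new File("incoherent.owl")); // illustrative file

        // Keep a reference to the underlying non-buffering Pellet reasoner and classify
        // incrementally on top of it; only the class hierarchy is precomputed up front.
        PelletReasoner baseReasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(ont);
        IncrementalClassifier classifier = new IncrementalClassifier(baseReasoner);
        classifier.precomputeInferences(InferenceType.CLASS_HIERARCHY);

        System.out.println("Consistent: " + classifier.isConsistent());
    }
}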
From: <lor...@us...> - 2012-02-29 08:07:28
Revision: 3600 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3600&view=rev Author: lorenz_b Date: 2012-02-29 08:07:17 +0000 (Wed, 29 Feb 2012) Log Message: ----------- Added optimizations. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:00:04 UTC (rev 3599) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:07:17 UTC (rev 3600) @@ -78,8 +78,8 @@ private static final String DIFF_ONTOLOGY_NAME = "diff.owl"; private int numberOfJustifications = 10; - private PelletReasoner propReasoner; - private IncrementalClassifier reasoner; + private PelletReasoner baseReasoner; + private PelletReasoner reasoner;//IncrementalClassifier reasoner; private Reasoner hermitReasoner; private OWLOntology incoherentOntology; @@ -155,8 +155,8 @@ factory = manager.getOWLDataFactory(); long startTime = System.currentTimeMillis(); - propReasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(incoherentOntology); - reasoner = new IncrementalClassifier(propReasoner); + baseReasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(incoherentOntology); + reasoner = baseReasoner;//new IncrementalClassifier(baseReasoner); reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -246,7 +246,7 @@ //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); - reasoner.classify(); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); // hermitReasoner.classifyClasses(); //Set<OWLClass> unsatClasses2 = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -381,7 +381,7 @@ //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); - reasoner.classify(); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); @@ -434,7 +434,7 @@ //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); - PelletExplanation expGen = new PelletExplanation(reasoner.getReasoner()); + PelletExplanation expGen = new PelletExplanation(baseReasoner); Set<Set<OWLAxiom>> explanations; for(OWLClass unsatCls : unsatClasses){ explanations = expGen.getUnsatisfiableExplanations(unsatCls, numberOfJustifications); @@ -450,7 +450,7 @@ //recompute the unsatisfiable classes logger.info("Reclassifying..."); startTime = System.currentTimeMillis(); - reasoner.classify(); + reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); This 
was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
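Editor's note: with revision 3600 the incremental classifier is bypassed entirely; one non-buffering PelletReasoner is used both for classification (via precomputeInferences) and as the backend for PelletExplanation. A short sketch of that pattern, assuming the Pellet OWL API v3 bindings already imported by the file (the helper class is made up for illustration):

    import java.util.Set;
    import org.semanticweb.owlapi.model.OWLAxiom;
    import org.semanticweb.owlapi.model.OWLClass;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.reasoner.InferenceType;
    import com.clarkparsia.owlapi.explanation.PelletExplanation;
    import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
    import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;

    class ExplanationSketch {
        static void explainUnsatClasses(OWLOntology ont, int numberOfJustifications) {
            // registers Pellet's explanation support, as in the class's static initializer
            PelletExplanation.setup();

            // one non-buffering reasoner serves classification and explanation generation
            PelletReasoner reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(ont);
            reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);

            PelletExplanation expGen = new PelletExplanation(reasoner);
            for (OWLClass unsatCls : reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom()) {
                // up to numberOfJustifications justifications per unsatisfiable class
                Set<Set<OWLAxiom>> explanations =
                        expGen.getUnsatisfiableExplanations(unsatCls, numberOfJustifications);
                System.out.println(unsatCls + ": " + explanations.size());
            }
        }
    }

Sharing the reasoner avoids rebuilding a Pellet knowledge base per explanation run, at the price of losing the incremental reclassification after each removal.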
From: <lor...@us...> - 2012-02-29 08:08:26
Revision: 3601 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3601&view=rev Author: lorenz_b Date: 2012-02-29 08:08:16 +0000 (Wed, 29 Feb 2012) Log Message: ----------- Added optimizations. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:07:17 UTC (rev 3600) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:08:16 UTC (rev 3601) @@ -129,7 +129,7 @@ @Override public OWLOntology getCoherentOntology(OWLOntology ontology, boolean preferRoots){ -// ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); + ontology.getOWLOntologyManager().addAxioms(ontology, dbpediaOntology.getLogicalAxioms()); this.ontology = ontology; this.incoherentOntology = getOntologyWithoutAnnotations(ontology); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
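Editor's note: revision 3601 only re-enables merging the DBpedia reference schema into the ontology under debugging. The point of the merge shows up downstream: justifications may now contain reference axioms, and the removal step must never delete one of them. A minimal sketch of that guard over the frequency-sorted candidate list (helper name and signature are illustrative, not the class's actual API):

    import java.util.List;
    import java.util.Map;
    import org.semanticweb.owlapi.model.OWLAxiom;
    import org.semanticweb.owlapi.model.OWLOntology;

    class RemovalGuardSketch {
        // pick the most frequent candidate that is NOT asserted by the reference schema
        static OWLAxiom pickRemovalCandidate(List<Map.Entry<OWLAxiom, Integer>> sortedCandidates,
                                             OWLOntology referenceOntology) {
            for (Map.Entry<OWLAxiom, Integer> e : sortedCandidates) {
                if (!referenceOntology.containsAxiomIgnoreAnnotations(e.getKey())) {
                    return e.getKey();
                }
            }
            return null; // every remaining candidate is a protected reference axiom
        }
    }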
From: <lor...@us...> - 2012-03-01 02:50:21
Revision: 3602 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3602&view=rev Author: lorenz_b Date: 2012-03-01 02:50:14 +0000 (Thu, 01 Mar 2012) Log Message: ----------- Added workaround for problem in OWLAPI with user-defined datatypes. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-02-29 08:08:16 UTC (rev 3601) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-03-01 02:50:14 UTC (rev 3602) @@ -2,6 +2,8 @@ import java.io.BufferedInputStream; import java.io.BufferedOutputStream; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -47,7 +49,6 @@ import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLEntity; import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLLogicalAxiom; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; @@ -64,13 +65,18 @@ import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; import uk.ac.manchester.cs.owlapi.modularity.ModuleType; -import com.clarkparsia.modularity.IncrementalClassifier; import com.clarkparsia.modularity.ModularityUtils; import com.clarkparsia.owlapi.explanation.BlackBoxExplanation; import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator; import com.clarkparsia.owlapi.explanation.PelletExplanation; import com.clarkparsia.pellet.owlapiv3.PelletReasoner; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.RDFNode; +import com.hp.hpl.jena.rdf.model.Statement; +import com.hp.hpl.jena.rdf.model.StmtIterator; +import com.hp.hpl.jena.vocabulary.RDFS; public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ @@ -108,7 +114,7 @@ private OWLOntology dbpediaOntology; - private String fileName = "dbpedia"; + private String fileName; private String diffFileName = "diff.owl"; public JustificationBasedCoherentOntologyExtractor() { @@ -117,7 +123,7 @@ } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } - dbpediaOntology = loadDBpediaOntology(); + dbpediaOntology = loadDBpediaOntologyOWLDL(); } static {PelletExplanation.setup();} @@ -147,7 +153,8 @@ e1.printStackTrace(); } - //only for debugging + /*only to avoid Pellet warnings during the process and this axioms are only removed from the ontology, + which is used during the debugging and not from the ontology which is always saved and returned finally*/ removedTransitiveAxioms = incoherentOntology.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); incoherentOntology.getOWLOntologyManager().removeAxioms(incoherentOntology, removedTransitiveAxioms); @@ -181,12 +188,6 @@ } private OWLOntology computeCoherentOntologyRootBased(OWLOntology ontology) { - -// startTime = System.currentTimeMillis(); -// hermitReasoner = new Reasoner(incoherentOntology); -// 
hermitReasoner.classifyClasses(); -// logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - OWLOntologyManager man = incoherentOntology.getOWLOntologyManager(); factory = man.getOWLDataFactory(); // man.addOntologyChangeListener(reasoner); @@ -203,9 +204,9 @@ int derivedCnt = derivedUnsatClasses.size(); //if no roots are found we use all unsat classes - if(rootCnt == 0){ - unsatClasses = derivedUnsatClasses; - } + if(rootCnt == 0){ + unsatClasses = derivedUnsatClasses; + } // Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); int cnt = rootCnt + derivedCnt; @@ -333,6 +334,10 @@ unsatPropCnt = unsatObjectProperties.size(); } + if(unsatObjectProperties.isEmpty()){ + break; + } + //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); @@ -349,39 +354,42 @@ } private OWLOntology computeCoherentOntology(OWLOntology ontology) { + OWLOntologyManager man = incoherentOntology.getOWLOntologyManager(); + factory = man.getOWLDataFactory(); +// man.addOntologyChangeListener(reasoner); + //compute the unsatisfiable classes logger.info("Computing unsatisfiable classes..."); long startTime = System.currentTimeMillis(); + Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); - int cnt = unsatClasses.size(); - logger.info("Detected " + cnt + " unsatisfiable classes."); + logger.info("Detected " + unsatClasses.size() + " unsatisfiable classes."); //if the ontology is not incoherent we return it here if(unsatClasses.isEmpty()){ return incoherentOntology; } - //compute the logical modules for each unsatisfiable class - logger.info("Computing module for each unsatisfiable class..."); - startTime = System.currentTimeMillis(); - entity2ModuleMap = extractModules(unsatClasses); - logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); //compute initial explanations for each unsatisfiable class logger.info("Computing initial explanations..."); startTime = System.currentTimeMillis(); computeExplanations(unsatClasses); -// entity2Explanations.putAll(getInitialExplanations(unsatObjectProperties)); + if(computeParallel){ + computeExplanations(unsatObjectProperties); + } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(!unsatClasses.isEmpty()){// && !unsatObjectProperties.isEmpty()){ + int cnt = unsatClasses.size(); + if(computeParallel){ + cnt += unsatObjectProperties.size(); + } + + while(cnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); - //recompute the unsatisfiable classes - logger.info("Reclassifying..."); + logger.info("Computing unsatisfiable classes..."); startTime = System.currentTimeMillis(); - reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Remaining unsatisfiable classes: " + unsatClasses.size()); @@ -391,30 +399,75 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if(cnt - unsatClasses.size() >= 10){ + if((!computeParallel && (cnt-unsatClasses.size()>= 5)) + || (computeParallel && (cnt-unsatClasses.size()+unsatObjectProperties.size())>=5)){ cnt = 
unsatClasses.size(); - save("log/" + fileName + "_" + cnt + "cls" + unsatObjectProperties.size() + "prop.owl"); + save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); + if(computeParallel){ + cnt += unsatObjectProperties.size(); + } } + if(unsatClasses.isEmpty() && (!computeParallel || (computeParallel && unsatObjectProperties.isEmpty()))){ + cnt = 0; + break; + } //recompute explanations if necessary logger.info("Recomputing explanations..."); startTime = System.currentTimeMillis(); - refillExplanations(unsatClasses, entity2Explanations); - //refillExplanations(unsatObjectProperties, entity2Explanations); + computeExplanations(unsatClasses); + if(computeParallel){ + computeExplanations(unsatObjectProperties); + } logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); System.gc(); } - try { - incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); + entity2Explanations.clear(); + entity2ModuleMap.clear(); + + save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); + + + if(!computeParallel){ + unsatObjectProperties = getUnsatisfiableObjectProperties(); + int unsatPropCnt = unsatObjectProperties.size(); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + + logger.info("Recomputing explanations..."); + startTime = System.currentTimeMillis(); + computeExplanations(unsatObjectProperties); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + while(unsatPropCnt > 0){ + //we remove the most appropriate axiom from the ontology + removeAppropriateAxiom(); + + //recompute unsatisfiable object properties + unsatObjectProperties = getUnsatisfiableObjectProperties(); + logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); + + //save + if((unsatPropCnt - unsatObjectProperties.size()) >= 5){ + save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatPropCnt + "prop.owl"); + unsatPropCnt = unsatObjectProperties.size(); + } + if(unsatObjectProperties.isEmpty()){ + break; + } + + //recompute explanations if necessary + logger.info("Recomputing explanations..."); + startTime = System.currentTimeMillis(); + computeExplanations(unsatObjectProperties); + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + + System.gc(); + } } - System.out.println(incoherentOntology.getLogicalAxiomCount()); - return getOntologyWithAnnotations(incoherentOntology); + save("log/" + fileName + "_coherent.owl"); + + return ontology; } private OWLOntology computeCoherentOntology2(OWLOntology ontology) { @@ -473,16 +526,9 @@ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); } - try { - incoherentOntology.getOWLOntologyManager().saveOntology(getOntologyWithAnnotations(incoherentOntology), new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/dbpedia_coherent.owl"))); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } - System.out.println(incoherentOntology.getLogicalAxiomCount()); + save("log/" + fileName + "_coherent.owl"); - return 
getOntologyWithAnnotations(incoherentOntology); + return ontology; } private void removeAppropriateAxiom(){ @@ -491,6 +537,12 @@ Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); //get a sorted list of entries with the highest axiom count first List<Entry<OWLAxiom, Integer>> sortedEntries = sort(axiom2CountMap); + logger.info("Candidates: " + sortedEntries.size()); + if(sortedEntries.size() >= 2){ + logger.info("First: " + sortedEntries.get(0) + "(" + getConfidence(sortedEntries.get(0).getKey()) + ")"); + logger.info("Second: " + sortedEntries.get(1) + "(" + getConfidence(sortedEntries.get(1).getKey()) + ")"); + } + //we remove the most frequent axiom from the ontology which is not contained in the original DBpedia ontology for(Entry<OWLAxiom, Integer> e : sortedEntries){ OWLAxiom axiom = e.getKey(); @@ -511,6 +563,8 @@ } private void save(String fileName){ + logger.info("Writing to disk..."); + long startTime = System.currentTimeMillis(); try { ontology.getOWLOntologyManager().saveOntology(ontology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(fileName))); diffOntology.getOWLOntologyManager().saveOntology(diffOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream("log/" + diffFileName))); @@ -519,6 +573,7 @@ } catch (FileNotFoundException e) { e.printStackTrace(); } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); } private Set<OWLObjectProperty> getUnsatisfiableObjectProperties(){ @@ -526,13 +581,20 @@ long startTime = System.currentTimeMillis(); SortedSet<OWLObjectProperty> properties = new TreeSet<OWLObjectProperty>(); OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); - for(OWLObjectProperty p : reasoner.getRootOntology().getObjectPropertiesInSignature()){ + for(OWLObjectProperty p : incoherentOntology.getObjectPropertiesInSignature()){ // boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectExactCardinality(1, p)); boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectSomeValuesFrom(p, factory.getOWLThing())); if(!satisfiable){ properties.add(p); } } + /* + * this method down't seem to work TODO ask Pellet developers why + for(OWLObjectPropertyExpression p : reasoner.getEquivalentObjectProperties(factory.getOWLBottomObjectProperty()).getEntitiesMinusBottom()){ + if(!p.isAnonymous()){ + properties.add(p.asOWLObjectProperty()); + } + }*/ logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); return properties; } @@ -556,17 +618,6 @@ } } - private void refillExplanations(Set<? 
extends OWLEntity> unsatEntities, Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations){ - for(OWLEntity unsatClass : unsatEntities){ - Set<Set<OWLAxiom>> precomputedExplanations = entity2Explanations.get(unsatClass); - if(precomputedExplanations == null || precomputedExplanations.size() < numberOfJustifications){ - Set<Set<OWLAxiom>> newExplanations = computeExplanations(unsatClass, numberOfJustifications); - logger.info(unsatClass + ": " + newExplanations.size()); - entity2Explanations.put(unsatClass, newExplanations); - } - } - } - private Map<OWLAxiom, Integer> getAxiomFrequency(Map<OWLEntity, Set<Set<OWLAxiom>>> entity2Explanations){ Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); @@ -632,18 +683,6 @@ return null; } - private OWLOntology getOntologyWithAnnotations(OWLOntology ontologyWithOutAnnotations){ - logger.info("BEFORE: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); - OWLOntologyManager man = ontology.getOWLOntologyManager(); - for (Iterator<OWLLogicalAxiom> iterator = ontology.getLogicalAxioms().iterator(); iterator.hasNext();) { - OWLLogicalAxiom axiom = iterator.next(); - if(!ontologyWithOutAnnotations.containsAxiomIgnoreAnnotations(axiom)){ - man.removeAxiom(ontology, axiom); - } - }logger.info("AFTER: "+ ontology.getSubClassAxiomsForSubClass(factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Award")))); - return ontology; - } - private Set<Set<OWLAxiom>> computeExplanations(OWLEntity unsatEntity){ return computeExplanations(unsatEntity, numberOfJustifications); } @@ -675,7 +714,7 @@ private double getConfidence(OWLAxiom axiom){ Set<OWLAxiom> axiomsWithAnnotations = ontology.getAxiomsIgnoreAnnotations(axiom); - if(axiomsWithAnnotations.isEmpty()){ + if(axiomsWithAnnotations.isEmpty()){//this should never happen logger.info("Axiom with annotations not found: " + axiom); return 2; } @@ -768,6 +807,62 @@ return ontology; } + /** + * First try to clean up ontology with JENA as original ontology is in OWL Full because of some user-defined datatypes. + * We could either (1) return the rdfs:range triples of the properties with user-defined datatypes or (2) remove all triples about the property. 
+ * @return + */ + private OWLOntology loadDBpediaOntologyOWLDL() { + long startTime = System.currentTimeMillis(); + logger.info("Loading DBpedia reference ontology..."); + OWLOntology ontology = null; + try { + URL dbpediaURL = new URL("http://downloads.dbpedia.org/3.7/dbpedia_3.7.owl.bz2"); + InputStream is = dbpediaURL.openStream(); + is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); + Model model = ModelFactory.createDefaultModel(); + model.read(is, null); + //get all subjects where URI of RDFS:range starts with http://dbpedia.org/datatype/ + for(StmtIterator iter = model.listStatements(null, RDFS.range, (RDFNode)null); iter.hasNext();){ + Statement st = iter.next(); + if(st.getObject().asResource().getURI().startsWith("http://dbpedia.org/datatype/")){ + iter.remove(); + } + //solution 2 +// for(StmtIterator iter2 = model.listStatements(iter.next().getSubject(), null, (RDFNode)null); iter2.hasNext();){ +// iter2.remove(); +// } + } + + + + return convert(model); + + } catch (MalformedURLException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } catch (CompressorException e) { + e.printStackTrace(); + } + logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); + return ontology; + } + + private OWLOntology convert(Model model) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + model.write(baos, "N-TRIPLE"); + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLOntology retOnt = null; + try { + retOnt = manager.loadOntologyFromOntologyDocument(bais); + } catch (OWLOntologyCreationException e) { + + } + return retOnt; + } + private Map<OWLEntity, OWLOntology> extractModules(Set<? extends OWLEntity> entities){ logger.info("Computing modules..."); long startTime = System.currentTimeMillis(); @@ -843,8 +938,6 @@ is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is); } OWLOntology schema = man.loadOntologyFromOntologyDocument(is); - Set<OWLTransitiveObjectPropertyAxiom> removedAxioms = schema.getAxioms(AxiomType.TRANSITIVE_OBJECT_PROPERTY); - man.removeAxioms(schema, removedAxioms); System.out.println("...done."); JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); @@ -854,10 +947,9 @@ if(filename.indexOf('/') >= 0){ filename = filename.substring( filename.lastIndexOf('/')+1, filename.length() ); } - extractor.setFileName(filename); + OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); - man.addAxioms(coherentOntology, removedAxioms); System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
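Editor's note: the workaround itself lives in the new loadDBpediaOntologyOWLDL. Because the published DBpedia 3.7 ontology falls into OWL Full due to user-defined datatypes, it is first read with Jena, the offending rdfs:range triples are dropped, and the cleaned model is re-parsed with the OWL API via an in-memory N-TRIPLE round trip. A condensed sketch of that pipeline, assuming the Jena and commons-compress APIs the patch imports (the download URL and datatype prefix are taken from the patch; the helper class is illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.InputStream;
    import java.net.URL;
    import org.apache.commons.compress.compressors.CompressorStreamFactory;
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.OWLOntology;
    import com.hp.hpl.jena.rdf.model.Model;
    import com.hp.hpl.jena.rdf.model.ModelFactory;
    import com.hp.hpl.jena.rdf.model.RDFNode;
    import com.hp.hpl.jena.rdf.model.Statement;
    import com.hp.hpl.jena.rdf.model.StmtIterator;
    import com.hp.hpl.jena.vocabulary.RDFS;

    class DBpediaLoaderSketch {
        static OWLOntology loadDBpediaOWLDL() throws Exception {
            // decompress the bz2 dump on the fly
            InputStream is = new URL("http://downloads.dbpedia.org/3.7/dbpedia_3.7.owl.bz2").openStream();
            is = new CompressorStreamFactory().createCompressorInputStream("bzip2", is);

            Model model = ModelFactory.createDefaultModel();
            model.read(is, null);

            // drop rdfs:range statements whose object is a user-defined datatype
            for (StmtIterator iter = model.listStatements(null, RDFS.range, (RDFNode) null); iter.hasNext();) {
                Statement st = iter.next();
                if (st.getObject().isURIResource()
                        && st.getObject().asResource().getURI().startsWith("http://dbpedia.org/datatype/")) {
                    iter.remove();
                }
            }

            // serialize to N-TRIPLE and re-parse with the OWL API
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            model.write(baos, "N-TRIPLE");
            return OWLManager.createOWLOntologyManager()
                    .loadOntologyFromOntologyDocument(new ByteArrayInputStream(baos.toByteArray()));
        }
    }

The patch also notes an alternative (removing all triples about the affected properties); the sketch above follows the variant that is actually enabled, which only strips the range declarations.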
From: <lor...@us...> - 2012-03-01 09:32:12
Revision: 3603 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3603&view=rev Author: lorenz_b Date: 2012-03-01 09:32:01 +0000 (Thu, 01 Mar 2012) Log Message: ----------- Added some optimization to use local debugging if all justifications for each entity are found, so no reasoner call would be needed to check if there are still some unsatisfiable entities. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-03-01 02:50:14 UTC (rev 3602) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2012-03-01 09:32:01 UTC (rev 3603) @@ -106,6 +106,7 @@ private Set<OWLTransitiveObjectPropertyAxiom> removedTransitiveAxioms; private Set<OWLObjectProperty> unsatObjectProperties; + private Set<OWLClass> unsatClasses; //whether to debug classes and properties in parallel private boolean computeParallel = false; @@ -196,7 +197,7 @@ logger.info("Computing root/derived unsatisfiable classes..."); long startTime = System.currentTimeMillis(); StructureBasedRootClassFinder rootFinder = new StructureBasedRootClassFinder(reasoner, this); - Set<OWLClass> unsatClasses = rootFinder.getRootUnsatisfiableClasses(); + unsatClasses = rootFinder.getRootUnsatisfiableClasses(); Set<OWLClass> derivedUnsatClasses = rootFinder.getDerivedUnsatisfiableClasses(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); @@ -347,8 +348,9 @@ System.gc(); } } - save("log/" + fileName + "_coherent.owl"); + logger.info("Finished. \n Coherent ontology contains " + ontology.getLogicalAxiomCount() + " logical axioms." 
+ + "Removed axioms: " + diffOntology.getLogicalAxiomCount()); return ontology; } @@ -361,7 +363,7 @@ //compute the unsatisfiable classes logger.info("Computing unsatisfiable classes..."); long startTime = System.currentTimeMillis(); - Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); logger.info("Detected " + unsatClasses.size() + " unsatisfiable classes."); @@ -384,7 +386,7 @@ cnt += unsatObjectProperties.size(); } - while(cnt > 0){ + while(!runLocalDebugging() && cnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -399,8 +401,8 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if((!computeParallel && (cnt-unsatClasses.size()>= 5)) - || (computeParallel && (cnt-unsatClasses.size()+unsatObjectProperties.size())>=5)){ + if((!computeParallel && (cnt-unsatClasses.size()>= 10)) + || (computeParallel && (cnt-unsatClasses.size()+unsatObjectProperties.size())>=10)){ cnt = unsatClasses.size(); save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); if(computeParallel){ @@ -425,6 +427,8 @@ } entity2Explanations.clear(); entity2ModuleMap.clear(); + entitiesWithLessExplanations.clear(); + entity2ExpGen.clear(); save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatObjectProperties.size() + "prop.owl"); @@ -434,11 +438,11 @@ int unsatPropCnt = unsatObjectProperties.size(); logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); - logger.info("Recomputing explanations..."); + logger.info("Computing explanations..."); startTime = System.currentTimeMillis(); computeExplanations(unsatObjectProperties); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - while(unsatPropCnt > 0){ + while(!runLocalDebugging() && unsatPropCnt > 0){ //we remove the most appropriate axiom from the ontology removeAppropriateAxiom(); @@ -447,7 +451,7 @@ logger.info("Remaining unsatisfiable object properties: " + unsatObjectProperties.size()); //save - if((unsatPropCnt - unsatObjectProperties.size()) >= 5){ + if((unsatPropCnt - unsatObjectProperties.size()) >= 10){ save("log/" + fileName + "_" + unsatClasses.size() + "cls" + unsatPropCnt + "prop.owl"); unsatPropCnt = unsatObjectProperties.size(); } @@ -464,12 +468,52 @@ System.gc(); } } - save("log/" + fileName + "_coherent.owl"); + logger.info("Finished. \n Coherent ontology contains " + ontology.getLogicalAxiomCount() + " logical axioms." + + "Removed axioms: " + diffOntology.getLogicalAxiomCount()); return ontology; } + /* + * check here if all explanations are found, i.e. 
for each entity the number of justifications is lower than the limit which was set + */ + private boolean allExplanationsFound(){ + boolean allExplanationsFound = false; + if(computeParallel){ + allExplanationsFound = entitiesWithLessExplanations.size() == (unsatClasses.size() + unsatObjectProperties.size()); + } else { + allExplanationsFound = entitiesWithLessExplanations.size() == unsatClasses.size(); + } + return allExplanationsFound; + } + + private boolean runLocalDebugging(){ + if(allExplanationsFound()){ + //add all explanations into one set + Set<Set<OWLAxiom>> explanations = new HashSet<Set<OWLAxiom>>(); + for(Entry<OWLEntity, Set<Set<OWLAxiom>>> e: entity2Explanations.entrySet()){ + explanations.addAll(e.getValue()); + } + //get the frequency for each axiom + Map<OWLAxiom, Integer> axiom2CountMap = getAxiomFrequency(entity2Explanations); + //get a sorted list of entries with the highest axiom count first + List<Entry<OWLAxiom, Integer>> candidates = sort(axiom2CountMap); + //remove axioms until no further explanation exists + while(!explanations.isEmpty()){ + removeAppropriateAxiomLocal(explanations, candidates); + } + if(computeParallel){ + unsatClasses.clear(); + unsatObjectProperties.clear(); + } else { + unsatClasses.clear(); + } + return true; + } + return false; + } + private OWLOntology computeCoherentOntology2(OWLOntology ontology) { //compute the unsatisfiable classes logger.info("Computing unsatisfiable classes..."); @@ -562,6 +606,40 @@ } } + private void removeAppropriateAxiomLocal(Set<Set<OWLAxiom>> explanations, List<Entry<OWLAxiom, Integer>> candidates){ + logger.info("Searching for appropriate axiom to remove..."); + logger.info("Candidates: " + candidates.size()); + if(candidates.size() >= 2){ + logger.info("First: " + candidates.get(0) + "(" + getConfidence(candidates.get(0).getKey()) + ")"); + logger.info("Second: " + candidates.get(1) + "(" + getConfidence(candidates.get(1).getKey()) + ")"); + } + + //we remove the most frequent axiom from the ontology which is not contained in the original DBpedia ontology + for(Iterator<Entry<OWLAxiom, Integer>> iter = candidates.iterator(); iter.hasNext();){ + OWLAxiom axiom = iter.next().getKey(); + if(!dbpediaOntology.containsAxiomIgnoreAnnotations(axiom)){ + iter.remove(); + logger.info("Removing axiom " + axiom + "."); + manager.removeAxiom(incoherentOntology, axiom); + //remove the axiom also from the loaded ontology + OWLAxiom originalAnnotatedAxiom = ontology.getAxiomsIgnoreAnnotations(axiom).iterator().next(); + ontology.getOWLOntologyManager().removeAxiom(ontology, originalAnnotatedAxiom); + //add the removed annotated axiom to the diff ontology + manager.addAxiom(diffOntology, originalAnnotatedAxiom); + //remove each explanation which contains the axiom + for (Iterator<Set<OWLAxiom>> iterator = explanations.iterator(); iterator.hasNext();) { + Set<OWLAxiom> explanation = iterator.next(); + if(explanation.contains(axiom)){ + iterator.remove(); + } + } + return; + } else { + iter.remove(); + } + } + } + private void save(String fileName){ logger.info("Writing to disk..."); long startTime = System.currentTimeMillis(); @@ -951,7 +1029,6 @@ OWLOntology coherentOntology = extractor.getCoherentOntology(schema, preferRoots); - System.out.println("Coherent ontology contains " + coherentOntology.getLogicalAxiomCount() + " logical axioms."); } class HermiTReasonerFactory implements OWLReasonerFactory{ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
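Editor's note: the shortcut in revision 3603 only kicks in once every remaining unsatisfiable entity has fewer justifications than the requested limit, i.e. its justification set is complete; from then on the repair can be finished by pure bookkeeping, with no further reasoner calls. A minimal sketch of that greedy "local" phase, assuming all justifications have already been pooled into one set (the helper name and signature are illustrative, not the class's actual API):

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.LinkedHashSet;
    import java.util.Map;
    import java.util.Set;
    import org.semanticweb.owlapi.model.OWLAxiom;

    class LocalRepairSketch {
        // Greedily pick axioms to remove so that every justification is broken,
        // never touching axioms from the protected reference ontology.
        static Set<OWLAxiom> selectRemovals(Set<Set<OWLAxiom>> justifications,
                                            Set<OWLAxiom> protectedAxioms) {
            Set<OWLAxiom> removals = new LinkedHashSet<OWLAxiom>();
            while (!justifications.isEmpty()) {
                // count how often each removable axiom occurs across the justifications
                Map<OWLAxiom, Integer> freq = new HashMap<OWLAxiom, Integer>();
                for (Set<OWLAxiom> just : justifications) {
                    for (OWLAxiom ax : just) {
                        if (!protectedAxioms.contains(ax)) {
                            Integer c = freq.get(ax);
                            freq.put(ax, c == null ? 1 : c + 1);
                        }
                    }
                }
                if (freq.isEmpty()) {
                    break; // remaining justifications contain only protected axioms
                }
                // pick the most frequent axiom
                OWLAxiom best = null;
                int bestCount = -1;
                for (Map.Entry<OWLAxiom, Integer> e : freq.entrySet()) {
                    if (e.getValue() > bestCount) {
                        best = e.getKey();
                        bestCount = e.getValue();
                    }
                }
                removals.add(best);
                // drop every justification the chosen axiom breaks
                for (Iterator<Set<OWLAxiom>> it = justifications.iterator(); it.hasNext();) {
                    if (it.next().contains(best)) {
                        it.remove();
                    }
                }
            }
            return removals;
        }
    }

Each pass removes the axiom covering the most remaining justifications, mirroring removeAppropriateAxiomLocal in the patch; the confidence-based logging and tie-breaking used there are omitted for brevity.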