From: <lor...@us...> - 2011-11-23 07:25:47
Revision: 3429
          http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3429&view=rev
Author:   lorenz_b
Date:     2011-11-23 07:25:39 +0000 (Wed, 23 Nov 2011)

Log Message:
-----------
Continued script.

Modified Paths:
--------------
    trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java

Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java
===================================================================
--- trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java	2011-11-22 21:19:51 UTC (rev 3428)
+++ trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java	2011-11-23 07:25:39 UTC (rev 3429)
@@ -4,11 +4,15 @@
 import java.io.ByteArrayOutputStream;
 import java.net.URL;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.FileAppender;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
+import org.apache.log4j.SimpleLayout;
 import org.dllearner.algorithm.qtl.util.ModelGenerator;
 import org.dllearner.algorithm.qtl.util.ModelGenerator.Strategy;
 import org.dllearner.kb.sparql.ExtractionDBCache;
@@ -16,11 +20,14 @@
 import org.dllearner.kb.sparql.SparqlQuery;
 import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.model.IRI;
+import org.semanticweb.owlapi.model.OWLAxiom;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyCreationException;
 import org.semanticweb.owlapi.model.OWLOntologyManager;
 import org.semanticweb.owlapi.reasoner.OWLReasoner;
 
+import com.clarkparsia.owlapi.explanation.PelletExplanation;
+import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
 import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;
 import com.hp.hpl.jena.query.ResultSet;
 import com.hp.hpl.jena.rdf.model.Model;
@@ -31,32 +38,47 @@
 
 	private SparqlEndpoint endpoint;
 	private ExtractionDBCache cache = new ExtractionDBCache("cache");
-	private int sampleSize = 1000;
-	private int depth = 3;
+	private int sampleSize = 10;
+	private int depth = 5;
+	private int nrOfChunks = 10;
+	private int maxNrOfExplanations = 10;
 
 	private Logger logger = Logger.getLogger(SPARQLSampleDebugging.class);
 
+	static {PelletExplanation.setup();}
+
 	public SPARQLSampleDebugging(SparqlEndpoint endpoint) {
 		this.endpoint = endpoint;
 	}
 
-	private Set<String> extractSampleResources(int offset){
-		logger.info("Extracting " + sampleSize + "sample resources...");
+	private Set<String> extractSampleResourcesChunked(int size){
+		logger.info("Extracting " + sampleSize + " sample resources...");
 		long startTime = System.currentTimeMillis();
 		Set<String> resources = new HashSet<String>();
 
-		String query = String.format("SELECT DISTINCT ?s WHERE {?s a ?type} LIMIT %d OFFSET %d", sampleSize, offset);
+		String query = "SELECT COUNT(DISTINCT ?s) WHERE {?s a ?type}";
 		ResultSet rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query));
+		int max = rs.next().getLiteral(rs.getResultVars().get(0)).getInt();
 
-		while(rs.hasNext()){
-			resources.add(rs.next().getResource("s").getURI());
+		for(int i = 0; i < nrOfChunks; i++){
+			int offset = (int)(Math.random() * max);
+			offset = Math.min(offset, offset-(size/nrOfChunks));
+
+			query = String.format("SELECT DISTINCT ?s WHERE {?s a ?type} LIMIT %d OFFSET %d", (size/nrOfChunks), offset);
+			logger.info(query);
+			rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query));
+
+			while(rs.hasNext()){
+				resources.add(rs.next().getResource("s").getURI());
+			}
 		}
+
 		logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms.");
 		return resources;
 	}
 
-	private Set<String> extractSampleResourcesRandom(int size){
-		logger.info("Extracting " + sampleSize + "sample resources...");
+	private Set<String> extractSampleResourcesSingle(int size){
+		logger.info("Extracting " + sampleSize + " sample resources...");
 		long startTime = System.currentTimeMillis();
 		Set<String> resources = new HashSet<String>();
 
@@ -67,9 +89,8 @@
 		for(int i = 0; i < size; i++){
 			int random = (int)(Math.random() * max);
 			query = String.format("SELECT DISTINCT ?s WHERE {?s a ?type} LIMIT 1 OFFSET %d", random);
-			System.out.println(random);
 			rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query));
-			resources.add(rs.next().getResource("s").getURI());System.out.println(resources);
+			resources.add(rs.next().getResource("s").getURI());
 
 		}
 
@@ -90,9 +111,31 @@
 
 	}
 
+	private Set<Set<OWLAxiom>> computeExplanations(OWLOntology ontology){
+		logger.info("Computing explanations...");
+		long startTime = System.currentTimeMillis();
+		boolean useGlassBox = true;
+		PelletExplanation expGen = new PelletExplanation(ontology, useGlassBox);
+		Set<Set<OWLAxiom>> explanations = expGen.getInconsistencyExplanations(maxNrOfExplanations);
+		logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms.");
+		return explanations;
+	}
+
+	private Set<Set<OWLAxiom>> computeExplanations(PelletReasoner reasoner){
+		logger.info("Computing explanations...");
+		long startTime = System.currentTimeMillis();
+		PelletExplanation expGen = new PelletExplanation(reasoner);
+		Set<Set<OWLAxiom>> explanations = expGen.getInconsistencyExplanations(maxNrOfExplanations);
+		logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms.");
+		return explanations;
+	}
+
 	private OWLOntology loadReferenceOntology() throws OWLOntologyCreationException{
+		long startTime = System.currentTimeMillis();
+		logger.info("Loading reference ontology...");
 		OWLOntology ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(
 				getClass().getClassLoader().getResourceAsStream("dbpedia_0.75.owl"));
+		logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms.");
 		return ontology;
 	}
 
@@ -113,38 +156,112 @@
 
 	public void run() throws OWLOntologyCreationException{
 		OWLOntology reference = loadReferenceOntology();
+		Set<OWLOntology> ontologies = new HashSet<OWLOntology>();
+		ontologies.add(reference);
 
 		OWLOntology sample;
 		OWLOntology merged;
 		OWLReasoner reasoner;
-		for(int i = 0; i < 1; i++){
-			Set<String> resources = extractSampleResourcesRandom(sampleSize);
+		for(int i = 0; i < 100; i++){
+			Set<String> resources = extractSampleResourcesChunked(sampleSize);
 			sample = extractSampleModule(resources);
-			Set<OWLOntology> ontologies = new HashSet<OWLOntology>();
 			ontologies.add(sample);
-			ontologies.add(reference);
 
 			merged = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://merged.en"), ontologies);
 
 			reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(merged);
+			Logger pelletLogger = Logger.getLogger("org.mindswap.pellet");
+			pelletLogger.setLevel(Level.OFF);
+
 			boolean isConsistent = reasoner.isConsistent();
 			logger.info("Consistent: " + isConsistent);
-			System.out.println(isConsistent);
 			reasoner.dispose();
+			if(!isConsistent){
+				Set<Set<OWLAxiom>> explanations = computeExplanations(merged);
+				for(Set<OWLAxiom> explanation : explanations){
+					System.out.println(explanation);
+				}
+				break;
+
+			}
+			ontologies.remove(sample);
+
 		}
 
 	}
 
+	public void run2() throws OWLOntologyCreationException{
+		OWLOntology reference = loadReferenceOntology();
+		OWLReasoner reasoner = PelletReasonerFactory.getInstance().createReasoner(reference);
+		OWLOntologyManager man = OWLManager.createOWLOntologyManager();
+
+		Set<String> resources = extractSampleResourcesChunked(sampleSize);
+		for(String resource : resources){
+			logger.info("Resource " + resource);
+			OWLOntology module = extractSampleModule(Collections.singleton(resource));
+			man.addAxioms(reference, module.getLogicalAxioms());
+			reasoner.flush();
+			logger.info(reasoner.getRootOntology().getLogicalAxiomCount());
+			boolean isConsistent = reasoner.isConsistent();
+			logger.info("Consistent: " + isConsistent);
+			if(!isConsistent){
+				Set<Set<OWLAxiom>> explanations = computeExplanations(reference);
+				for(Set<OWLAxiom> explanation : explanations){
+					logger.info(explanation);
+				}
+			}
+			man.removeAxioms(reference, module.getLogicalAxioms());
+
+		}
+
+	}
+
+	public void run3() throws OWLOntologyCreationException{
+		OWLOntology reference = loadReferenceOntology();
+		Set<OWLOntology> ontologies = new HashSet<OWLOntology>();
+		ontologies.add(reference);
+		PelletReasoner reasoner;
+		OWLOntology merged;
+		OWLOntology module;
+
+		Set<String> resources = extractSampleResourcesChunked(sampleSize);
+		for(String resource : resources){
+			logger.info("Resource " + resource);
+			module = extractSampleModule(Collections.singleton(resource));
+			ontologies.add(module);
+			merged = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://merged.en"), ontologies);
+			reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(merged);
+			boolean isConsistent = reasoner.isConsistent();
+			logger.info("Consistent: " + isConsistent);
+			if(!isConsistent){
+				Set<Set<OWLAxiom>> explanations = computeExplanations(reasoner);
+				for(Set<OWLAxiom> explanation : explanations){
+					logger.info(explanation);
+				}
+			}
+			ontologies.remove(module);
+			reasoner.dispose();
+
+		}
+
+	}
+
 	/**
 	 * @param args
 	 * @throws Exception
 	 */
 	public static void main(String[] args) throws Exception {
 		Logger.getRootLogger().setLevel(Level.INFO);
+		Logger.getRootLogger().removeAllAppenders();
+		Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout()));
+		Logger.getRootLogger().addAppender(new FileAppender(new SimpleLayout(), "log/debug.log"));
 		Logger.getLogger(SPARQLSampleDebugging.class).setLevel(Level.INFO);
+		java.util.logging.Logger pelletLogger = java.util.logging.Logger.getLogger("com.clarkparsia.pellet");
+		pelletLogger.setLevel(java.util.logging.Level.OFF);
+
 		SparqlEndpoint endpoint = new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList());
 
-		new SPARQLSampleDebugging(endpoint).run();
+		new SPARQLSampleDebugging(endpoint).run3();
 
 	}
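
A note on the reworked sampling: extractSampleResourcesChunked() first counts all typed subjects, then collects the sample in nrOfChunks LIMIT/OFFSET queries at random offsets, rather than reading one block at a fixed offset. Below is a minimal self-contained sketch of that idea using plain Jena ARQ against a public endpoint instead of DL-Learner's ExtractionDBCache. The endpoint URL, class name, and the clamp against max - chunkSize are illustrative assumptions, not the committed code.

import java.util.HashSet;
import java.util.Set;

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.ResultSet;

public class ChunkedSamplingSketch {

	// Illustrative endpoint; the script itself talks to http://dbpedia.aksw.org:8902/sparql.
	private static final String ENDPOINT = "http://dbpedia.org/sparql";

	public static Set<String> sample(int size, int nrOfChunks) {
		Set<String> resources = new HashSet<String>();

		// Step 1: count all typed subjects so random offsets stay in range.
		QueryExecution countQe = QueryExecutionFactory.sparqlService(ENDPOINT,
				"SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s a ?type}");
		int max = countQe.execSelect().next().getLiteral("cnt").getInt();
		countQe.close();

		// Step 2: fetch the sample in chunks at random offsets, which spreads
		// the sample over the whole dataset instead of its first rows.
		int chunkSize = size / nrOfChunks;
		for (int i = 0; i < nrOfChunks; i++) {
			// Clamp so a full chunk still fits below max; this assumes the
			// dataset holds at least chunkSize subjects.
			int offset = Math.min((int) (Math.random() * max), max - chunkSize);
			QueryExecution qe = QueryExecutionFactory.sparqlService(ENDPOINT, String.format(
					"SELECT DISTINCT ?s WHERE {?s a ?type} LIMIT %d OFFSET %d", chunkSize, offset));
			ResultSet rs = qe.execSelect();
			while (rs.hasNext()) {
				resources.add(rs.next().getResource("s").getURI());
			}
			qe.close();
		}
		return resources;
	}
}

The random offsets trade perfect uniformity for speed: chunks can overlap, and the HashSet simply deduplicates any resource drawn twice.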
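
The new computeExplanations() overloads wrap Pellet's explanation generator. One easy-to-miss requirement is the static PelletExplanation.setup() call, which must run before the first PelletExplanation is created; the commit handles this in a static initializer. A minimal sketch of the same pattern, assuming a placeholder ontology file name:

import java.io.File;
import java.util.Set;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;

import com.clarkparsia.owlapi.explanation.PelletExplanation;
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;

public class InconsistencyExplanationSketch {

	public static void main(String[] args) throws Exception {
		// Must run before any PelletExplanation is instantiated.
		PelletExplanation.setup();

		// Placeholder file name; any OWL document works here.
		OWLOntology ontology = OWLManager.createOWLOntologyManager()
				.loadOntologyFromOntologyDocument(new File("ontology.owl"));

		PelletReasoner reasoner = PelletReasonerFactory.getInstance()
				.createNonBufferingReasoner(ontology);
		if (!reasoner.isConsistent()) {
			// Each explanation is a minimal axiom set that, taken together,
			// makes the ontology inconsistent.
			PelletExplanation expGen = new PelletExplanation(reasoner);
			Set<Set<OWLAxiom>> explanations = expGen.getInconsistencyExplanations(10);
			for (Set<OWLAxiom> explanation : explanations) {
				System.out.println(explanation);
			}
		}
		reasoner.dispose();
	}
}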
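
run2() differs from run() and run3() in that it never builds a merged ontology: it splices each module's axioms into the reference ontology, calls flush() on a buffering Pellet reasoner, checks consistency, and removes the axioms again. A sketch of that add/flush/check/remove cycle, with placeholder file names standing in for the DBpedia schema and an extracted module:

import java.io.File;
import java.util.Set;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLLogicalAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;

public class IncrementalCheckSketch {

	public static void main(String[] args) throws Exception {
		OWLOntologyManager man = OWLManager.createOWLOntologyManager();
		// Placeholder file names for the fixed schema and one data module.
		OWLOntology reference = man.loadOntologyFromOntologyDocument(new File("reference.owl"));
		OWLOntology module = man.loadOntologyFromOntologyDocument(new File("module.owl"));

		// A buffering reasoner queues ontology changes until flush() is
		// called, so one reasoner instance can be reused across many modules.
		PelletReasoner reasoner = PelletReasonerFactory.getInstance().createReasoner(reference);

		Set<OWLLogicalAxiom> axioms = module.getLogicalAxioms();
		man.addAxioms(reference, axioms);    // splice the module into the reference
		reasoner.flush();                    // push the buffered changes to Pellet
		System.out.println("Consistent: " + reasoner.isConsistent());
		man.removeAxioms(reference, axioms); // restore the reference for the next module
		reasoner.flush();

		reasoner.dispose();
	}
}

Reusing one reasoner this way avoids re-classifying the large reference ontology for every resource, which is the apparent motivation for trying this variant alongside the merge-based run() and run3().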