You can subscribe to this list here.
2007 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
|
Aug
(120) |
Sep
(36) |
Oct
(116) |
Nov
(17) |
Dec
(44) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
2008 |
Jan
(143) |
Feb
(192) |
Mar
(74) |
Apr
(84) |
May
(105) |
Jun
(64) |
Jul
(49) |
Aug
(120) |
Sep
(159) |
Oct
(156) |
Nov
(51) |
Dec
(28) |
2009 |
Jan
(17) |
Feb
(55) |
Mar
(33) |
Apr
(57) |
May
(54) |
Jun
(28) |
Jul
(6) |
Aug
(16) |
Sep
(38) |
Oct
(30) |
Nov
(26) |
Dec
(52) |
2010 |
Jan
(7) |
Feb
(91) |
Mar
(65) |
Apr
(2) |
May
(14) |
Jun
(25) |
Jul
(38) |
Aug
(48) |
Sep
(80) |
Oct
(70) |
Nov
(75) |
Dec
(77) |
2011 |
Jan
(68) |
Feb
(53) |
Mar
(51) |
Apr
(35) |
May
(65) |
Jun
(101) |
Jul
(29) |
Aug
(230) |
Sep
(95) |
Oct
(49) |
Nov
(110) |
Dec
(63) |
2012 |
Jan
(41) |
Feb
(42) |
Mar
(25) |
Apr
(46) |
May
(51) |
Jun
(44) |
Jul
(45) |
Aug
(29) |
Sep
(12) |
Oct
(9) |
Nov
(17) |
Dec
(2) |
2013 |
Jan
(12) |
Feb
(14) |
Mar
(7) |
Apr
(16) |
May
(54) |
Jun
(27) |
Jul
(11) |
Aug
(5) |
Sep
(85) |
Oct
(27) |
Nov
(37) |
Dec
(32) |
2014 |
Jan
(8) |
Feb
(29) |
Mar
(5) |
Apr
(3) |
May
(22) |
Jun
(3) |
Jul
(4) |
Aug
(3) |
Sep
|
Oct
|
Nov
|
Dec
|
From: <lor...@us...> - 2011-12-19 13:18:39
|
Revision: 3507 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3507&view=rev Author: lorenz_b Date: 2011-12-19 13:18:28 +0000 (Mon, 19 Dec 2011) Log Message: ----------- Improved script. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2011-12-19 11:22:43 UTC (rev 3506) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2011-12-19 13:18:28 UTC (rev 3507) @@ -31,13 +31,15 @@ public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ - private static final int NUMBER_OF_JUSTIFICATIONS = 1; + private static final int NUMBER_OF_JUSTIFICATIONS = 2; // private PelletReasoner reasoner; private IncrementalClassifier reasoner; private OWLOntology incoherentOntology; private OWLOntology ontology; + private Map<OWLClass, OWLOntology> cls2ModuleMap; + static {PelletExplanation.setup();} @Override @@ -60,6 +62,8 @@ return incoherentOntology; } + cls2ModuleMap = extractModules(unsatClasses); + while(!unsatClasses.isEmpty()){ //for each unsatisfiable class we compute n justifications here and count how often each axiom occurs globally Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); @@ -120,17 +124,26 @@ } private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass){ - OWLOntology module = OntologyUtils.getOntologyFromAxioms( - ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)unsatClass), ModuleType.TOP_OF_BOT)); - PelletExplanation expGen = new PelletExplanation(module); + PelletExplanation expGen = new 
PelletExplanation(cls2ModuleMap.get(unsatClass)); return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); } + private Map<OWLClass, OWLOntology> extractModules(Set<OWLClass> classes){ + Map<OWLClass, OWLOntology> cls2ModuleMap = new HashMap<OWLClass, OWLOntology>(); + for(OWLClass cls : classes){ + OWLOntology module = OntologyUtils.getOntologyFromAxioms( + ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)cls), ModuleType.TOP_OF_BOT)); + cls2ModuleMap.put(cls, module); + } + return cls2ModuleMap; + } + public static void main(String[] args) throws Exception{ Logger.getLogger(RBox.class.getName()).setLevel(Level.OFF); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("../components-core/cohaerent.owl")); -// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); +// OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("../components-core/cohaerent.owl")); +// System.out.println(schema.getLogicalAxiomCount()); + OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); OWLOntology coherentOntology = extractor.getCoherentOntology(schema);System.out.println(coherentOntology.getLogicalAxiomCount()); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-19 11:22:49
|
Revision: 3506 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3506&view=rev Author: lorenz_b Date: 2011-12-19 11:22:43 +0000 (Mon, 19 Dec 2011) Log Message: ----------- Prepared all to work on local models. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/kb/LocalModelBasedSparqlEndpointKS.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-12-18 18:47:08 UTC (rev 3505) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-12-19 11:22:43 UTC (rev 3506) @@ -43,6 +43,8 @@ import org.dllearner.core.owl.Description; import org.dllearner.core.owl.DisjointClassesAxiom; import org.dllearner.core.owl.NamedClass; +import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; +import org.dllearner.kb.OWLFile; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; @@ -117,7 +119,7 @@ //TODO //at first get all existing classes in knowledgebase - allClasses = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); + allClasses = getAllClasses(); allClasses.remove(classToDescribe); //get the subclasses @@ -128,9 +130,9 @@ } if(ks.supportsSPARQL_1_1()){ + runSPARQL1_0_Mode(); + } else { runSPARQL1_1_Mode(); - } else { - runSPARQL1_0_Mode(); } //get classes and how often they occur @@ -184,9 +186,9 @@ private void runSPARQL1_1_Mode(){ int limit = 1000; int offset = 
0; - String queryTemplate = "SELECT ?type COUNT(?s) AS ?count WHERE {?s a ?type." + - "{SELECT ?s WHERE {?s a <%s>.} LIMIT %d OFFSET %d}" + - "}"; + String queryTemplate = "SELECT ?type (COUNT(?s) AS ?count) WHERE {?s a ?type." + + "{SELECT ?s WHERE {?s a <%s>.} LIMIT %d OFFSET %d} " + + "} GROUP BY ?type"; String query; Map<NamedClass, Integer> result = new HashMap<NamedClass, Integer>(); NamedClass cls; @@ -200,17 +202,20 @@ repeat = false; while(rs.hasNext()){ qs = rs.next(); - cls = new NamedClass(qs.getResource("type").getURI()); - int newCnt = qs.getLiteral("count").getInt(); - oldCnt = result.get(cls); - if(oldCnt == null){ - oldCnt = Integer.valueOf(newCnt); - } else { - oldCnt += newCnt; + if(qs.getResource("type") != null){ + cls = new NamedClass(qs.getResource("type").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(cls); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } else { + oldCnt += newCnt; + } + + result.put(cls, oldCnt); + repeat = true; } - result.put(cls, oldCnt); - repeat = true; } if(!result.isEmpty()){ currentlyBestEvaluatedDescriptions = buildEvaluatedClassDescriptions(result, allClasses); @@ -350,7 +355,8 @@ SortedSet<Description> mostGeneralClasses = reasoner.getClassHierarchy().getMostGeneralClasses(); } for(NamedClass cls : completeDisjointclasses){ - if(useClassPopularity && ks.supportsSPARQL_1_1()){ + if(useClassPopularity && ( + (ks instanceof SparqlEndpointKS && ((SparqlEndpointKS) ks).supportsSPARQL_1_1()) || !(ks instanceof SparqlEndpointKS))){ int popularity = reasoner.getIndividualsCount(cls); //we skip classes with no instances if(popularity == 0) continue; @@ -387,9 +393,11 @@ } public static void main(String[] args) throws Exception{ + LocalModelBasedSparqlEndpointKS ks = new LocalModelBasedSparqlEndpointKS(new URL("http://dl-learner.svn.sourceforge.net/viewvc/dl-learner/trunk/examples/swore/swore.rdf?revision=2217")); DisjointClassesLearner l = new DisjointClassesLearner(new 
SparqlEndpointKS(new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList()))); - l.setClassToDescribe(new NamedClass("http://dbpedia.org/ontology/Band")); + l = new DisjointClassesLearner(ks); + l.setClassToDescribe(new NamedClass("http://ns.softwiki.de/req/CustomerRequirement")); l.init(); l.getReasoner().prepareSubsumptionHierarchy(); // System.out.println(l.getReasoner().getClassHierarchy().getSubClasses(new NamedClass("http://dbpedia.org/ontology/Athlete"), false));System.exit(0); Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-18 18:47:08 UTC (rev 3505) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-19 11:22:43 UTC (rev 3506) @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -34,8 +35,12 @@ import org.dllearner.core.owl.Axiom; import org.dllearner.core.owl.ClassHierarchy; import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.ExtendedQueryEngineHTTP; +import org.dllearner.kb.sparql.SPARQLTasks; +import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.AxiomScore; import org.dllearner.learningproblems.Heuristics; import org.dllearner.reasoning.SPARQLReasoner; @@ -44,6 +49,7 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import com.hp.hpl.jena.ontology.OntClass; import com.hp.hpl.jena.query.QueryExecution; import 
com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.ResultSet; @@ -90,8 +96,6 @@ this.learningProblem = learningProblem; } - ExtendedQueryEngineHTTP queryExecution; - public int getMaxExecutionTimeInSeconds() { return maxExecutionTimeInSeconds; } @@ -132,7 +136,7 @@ public void init() throws ComponentInitException { ks.init(); if(reasoner == null){ - reasoner = new SPARQLReasoner(ks); + reasoner = new SPARQLReasoner((SparqlEndpointKS) ks); } } @@ -184,68 +188,72 @@ return returnList; } + protected Set<NamedClass> getAllClasses() { + if(ks.isRemote()){ + return new SPARQLTasks(((SparqlEndpointKS) ks).getEndpoint()).getAllClasses(); + } else { + Set<NamedClass> classes = new TreeSet<NamedClass>(); + for(OntClass cls : ((LocalModelBasedSparqlEndpointKS)ks).getModel().listClasses().toList()){ + if(!cls.isAnon()){ + classes.add(new NamedClass(cls.getURI())); + } + } + return classes; + } + + } + protected Model executeConstructQuery(String query) { logger.info("Sending query\n{} ...", query); - queryExecution = new ExtendedQueryEngineHTTP(ks.getEndpoint().getURL().toString(), - query); - queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); - queryExecution.setDefaultGraphURIs(ks.getEndpoint().getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(ks.getEndpoint().getNamedGraphURIs()); - - return queryExecution.execConstruct(); + if(ks.isRemote()){ + SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); + ExtendedQueryEngineHTTP queryExecution = new ExtendedQueryEngineHTTP(endpoint.getURL().toString(), + query); + queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); + queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); + return queryExecution.execConstruct(); + } else { + QueryExecution qexec = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + return qexec.execConstruct(); + } } protected ResultSet 
executeSelectQuery(String query) { logger.info("Sending query\n{} ...", query); - queryExecution = new ExtendedQueryEngineHTTP(ks.getEndpoint().getURL().toString(), - query); - queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); - queryExecution.setDefaultGraphURIs(ks.getEndpoint().getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(ks.getEndpoint().getNamedGraphURIs()); - -// ResultSet resultSet = null; -// try { -// resultSet = queryExecution.execSelect(); -// } catch (Exception e) { -// logger.error("Got a timeout during query execution.", e); -// resultSet = new CollectionResultSet(Collections.<String>emptyList(), Collections.<QuerySolution>emptyList()); -// } - ResultSet resultSet = queryExecution.execSelect(); - - return resultSet; + if(ks.isRemote()){ + SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); + ExtendedQueryEngineHTTP queryExecution = new ExtendedQueryEngineHTTP(endpoint.getURL().toString(), + query); + queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); + queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); + return queryExecution.execSelect(); + } else { + return executeSelectQuery(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + } } protected ResultSet executeSelectQuery(String query, Model model) { logger.info("Sending query\n{} ...", query); QueryExecution qexec = QueryExecutionFactory.create(query, model); ResultSet rs = qexec.execSelect();; - return rs; } - protected void close() { - queryExecution.close(); - } - protected boolean executeAskQuery(String query){ logger.info("Sending query\n{} ...", query); - QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); - for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); + if(ks.isRemote()){ + SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); + QueryEngineHTTP 
queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); + queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); + return queryExecution.execAsk(); + } else { + QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + return queryExecution.execAsk(); } - for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } -// ResultSet rs = queryExecution.execSelect(); -// boolean result = false; -// QuerySolution qs; -// if(rs.hasNext()){ -// qs = rs.next(); -// result = qs.get(qs.varNames().next()).asLiteral().getBoolean(); -// } - boolean result = queryExecution.execAsk(); - return result; } protected <K, V extends Comparable<V>> List<Entry<K, V>> sortByValues(Map<K, V> map){ @@ -269,6 +277,7 @@ protected List<Entry<Description, Integer>> sortByValues(Map<Description, Integer> map, final boolean useHierachy){ List<Entry<Description, Integer>> entries = new ArrayList<Entry<Description, Integer>>(map.entrySet()); final ClassHierarchy hierarchy = reasoner.getClassHierarchy(); + Collections.sort(entries, new Comparator<Entry<Description, Integer>>() { @Override Added: trunk/components-core/src/main/java/org/dllearner/kb/LocalModelBasedSparqlEndpointKS.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/LocalModelBasedSparqlEndpointKS.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/kb/LocalModelBasedSparqlEndpointKS.java 2011-12-19 11:22:43 UTC (rev 3506) @@ -0,0 +1,60 @@ +package org.dllearner.kb; + +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; + +import org.dllearner.core.ComponentInitException; + +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.ontology.OntModelSpec; +import com.hp.hpl.jena.rdf.model.Model; +import 
com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.util.FileManager; + +public class LocalModelBasedSparqlEndpointKS extends SparqlEndpointKS { + + private OntModel model; + + public LocalModelBasedSparqlEndpointKS(OntModel model) { + this.model = model; + } + + public LocalModelBasedSparqlEndpointKS(String ontologyURL) throws MalformedURLException { + this(new URL(ontologyURL)); + } + + public LocalModelBasedSparqlEndpointKS(URL ontologyURL) { + Model baseModel = ModelFactory.createDefaultModel(); + // use the FileManager to find the input file + InputStream in = FileManager.get().open(ontologyURL.toString()); + if (in == null) { + throw new IllegalArgumentException( + "File: " + ontologyURL + " not found"); + } + // read the RDF/XML file + baseModel.read(in, null); + + model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, baseModel); + } + + @Override + public void init() throws ComponentInitException { + + } + + public OntModel getModel() { + return model; + } + + @Override + public boolean isRemote() { + return false; + } + + @Override + public boolean supportsSPARQL_1_1() { + return true; + } + +} Modified: trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java 2011-12-18 18:47:08 UTC (rev 3505) +++ trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java 2011-12-19 11:22:43 UTC (rev 3506) @@ -45,6 +45,7 @@ private SparqlEndpoint endpoint; private boolean supportsSPARQL_1_1 = false; + private boolean isRemote = true; // TODO: turn those into config options @@ -84,6 +85,10 @@ public void setUrl(URL url) { this.url = url; } + + public boolean isRemote() { + return isRemote; + } public List<String> getDefaultGraphURIs() { return defaultGraphURIs; Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 
=================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-12-18 18:47:08 UTC (rev 3505) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-12-19 11:22:43 UTC (rev 3506) @@ -53,6 +53,7 @@ import org.dllearner.core.owl.ObjectProperty; import org.dllearner.core.owl.ObjectPropertyHierarchy; import org.dllearner.core.owl.Thing; +import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SPARQLTasks; @@ -64,6 +65,9 @@ import org.slf4j.LoggerFactory; import com.clarkparsia.owlapiv3.XSD; +import com.hp.hpl.jena.ontology.OntClass; +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; @@ -87,6 +91,7 @@ private SparqlEndpointKS ks; private ClassHierarchy hierarchy; + private OntModel model; public SPARQLReasoner(SparqlEndpointKS ks) { @@ -97,6 +102,10 @@ } } + public SPARQLReasoner(OntModel model) { + this.model = model; + } + public final ClassHierarchy prepareSubsumptionHierarchy() { logger.info("Preparing subsumption hierarchy ..."); long startTime = System.currentTimeMillis(); @@ -117,7 +126,18 @@ subsumptionHierarchyDown.put(Nothing.instance, new TreeSet<Description>(conceptComparator)); // ... 
and named classes - Set<NamedClass> atomicConcepts = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); + Set<NamedClass> atomicConcepts; + if(ks.isRemote()){ + atomicConcepts = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); + } else { + atomicConcepts = new TreeSet<NamedClass>(); + for(OntClass cls : ((LocalModelBasedSparqlEndpointKS)ks).getModel().listClasses().toList()){ + if(!cls.isAnon()){ + atomicConcepts.add(new NamedClass(cls.getURI())); + } + } + } + for (NamedClass atom : atomicConcepts) { tmp = getSubClasses(atom); // quality control: we explicitly check that no reasoner implementation returns null here @@ -571,7 +591,7 @@ } public int getIndividualsCount(NamedClass nc){ - String query = String.format("SELECT COUNT(?s) WHERE {" + + String query = String.format("SELECT (COUNT(?s) AS ?cnt) WHERE {" + "?s a <%s>." + "}", nc.getURI()); @@ -859,20 +879,26 @@ private ResultSet executeSelectQuery(String query){ logger.info("Sending query \n {}", query); - ResultSet resultset = null; - if(useCache){ - resultset = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(ks.getEndpoint(), query)); + ResultSet rs = null; + if(ks.isRemote()){ + if(useCache){ + rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(ks.getEndpoint(), query)); + } else { + QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); + for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { + queryExecution.addDefaultGraph(dgu); + } + for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { + queryExecution.addNamedGraph(ngu); + } + rs = queryExecution.execSelect(); + } } else { - QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); - for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); - } - for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } - resultset = queryExecution.execSelect(); + 
QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + rs = qExec.execSelect(); + } - return resultset; + return rs; } /** @@ -888,30 +914,26 @@ } private boolean executeAskQuery(String query){ - QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); - for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); + boolean ret; + if(ks.isRemote()){ + QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); + for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { + queryExecution.addDefaultGraph(dgu); + } + for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { + queryExecution.addNamedGraph(ngu); + } + ret = queryExecution.execAsk(); + + } else { + QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + ret = qExec.execAsk(); } - for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } - boolean ret = queryExecution.execAsk(); + return ret; } - private Model executeConstructQuery(String query){ - QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); - for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); - } - for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } - Model ret = queryExecution.execConstruct(); - return ret; - } - public static void main(String[] args) throws Exception{ // QueryEngineHTTP e = new QueryEngineHTTP("http://bibleontology.com/sparql/index.jsp", // "SELECT DISTINCT ?type WHERE {?s a ?type) LIMIT 10"); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-18 18:47:15
|
Revision: 3505 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3505&view=rev Author: sebastianwtr Date: 2011-12-18 18:47:08 +0000 (Sun, 18 Dec 2011) Log Message: ----------- [tbsl exploration] collecting all queries Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-15 13:26:09 UTC (rev 3504) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-18 18:47:08 UTC (rev 3505) @@ -32,8 +32,8 @@ */ private HashMap<String,String> sendServerPropertyRequest(String vergleich, String side) throws IOException{ - System.out.println("Resource die gesucht wird: "+ vergleich); - System.out.println("Seite die gesucht wird: "+side); + //System.out.println("Resource die gesucht wird: "+ vergleich); + //System.out.println("Seite die gesucht wird: "+side); /* * * For the second Iteration, I can just add the sparql property here. 
Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java 2011-12-15 13:26:09 UTC (rev 3504) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java 2011-12-18 18:47:08 UTC (rev 3505) @@ -13,8 +13,9 @@ double length=Math.max(orig.length(),eing.length()); //if distance between both is zero, then the NLD must be one + //but because they are equal, return a very high value, so that that query will be taken. if(result==0.0 ){ - return 1; + return 10.0; } else{ Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-15 13:26:09 UTC (rev 3504) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-18 18:47:08 UTC (rev 3505) @@ -63,7 +63,7 @@ // btemplator = new BasicTemplator(); btemplator.UNTAGGED_INPUT = false; - templator = new Templator(); + //templator = new Templator(); System.out.println("Loading SPARQL Templator Done\n"); System.out.println("Start Indexing"); myindex = new mySQLDictionary(); @@ -73,7 +73,7 @@ //normaly 1 setExplorationdepthwordnet(1); //eigentlich immer mit 0 initialisieren - setIterationdepth(9); + setIterationdepth(1); setNumberofanswers(1); } @@ -127,8 +127,10 @@ long startParsingTime = System.currentTimeMillis(); lstquery=getQuery(question); long endParsingTime = System.currentTimeMillis(); + long startIterationTime = System.currentTimeMillis(); System.out.println("The Questionparsing took "+ (endParsingTime-startParsingTime)+ " ms"); ArrayList<String> final_answer = new 
ArrayList<String>(); + ArrayList<String> final_query = new ArrayList<String>(); if(lstquery.isEmpty()){ saveNotParsedQuestions(question); @@ -226,24 +228,56 @@ * Only Levensthein!!! */ if(getIterationdepth()==1&&startIterating==true||getIterationdepth()==9&&startIterating==true){ - /* - //4, because of query + three conditions for the simple case - if(querylist.size()==4)final_answer=simpleIteration1Case(querylist, query); - //if we have more conditions, we need to change the way of replacing the uris got from wordnet etc - if(querylist.size()>4)final_answer=complexeIteration1Case(querylist, query); - */ - ArrayList<String> final_answer_tmp = new ArrayList<String>(); - if(querylist.size()==4)final_answer_tmp=simpleLevinstheinIteration(querylist, query); - if(querylist.size()>4)final_answer_tmp=complexeLevinstheinIteration(querylist, query); + ArrayList<String> final_query_tmp=new ArrayList<String>(); + if(querylist.size()==4){ + //System.out.println("YEAH!!!!!"); + //final_answer_tmp=simpleLevinstheinIteration(querylist, query); + final_query_tmp=simpleLevinstheinIteration(querylist, query); + for(String i: final_query_tmp){ + + //do it unnice for first + boolean double_query=false; + for(String s: final_query ){ + + if(s.contains(i)){ + double_query=true; + + } + } + if(double_query==false){ + + final_query.add(i); + } + } + } - for(String i : final_answer_tmp){ - final_answer.add(i); + + if(querylist.size()>4){ + final_query_tmp=complexeLevinstheinIteration(querylist, query); + for(String i: final_query_tmp){ + + //do it unnice for first + boolean double_query=false; + for(String s: final_query ){ + + if(s.contains(i)){ + double_query=true; + + } + } + if(double_query==false){ + + final_query.add(i); + } + } } + + } /* * ################################################################################################# @@ -254,21 +288,67 @@ */ if(getIterationdepth()==2&&startIterating==true||getIterationdepth()==9&&startIterating==true){ ArrayList<String> 
final_answer_tmp = new ArrayList<String>(); + ArrayList<String> final_query_tmp = new ArrayList<String>(); - if(querylist.size()==4)final_answer_tmp=simpleWordnetIteration(querylist, query); + if(querylist.size()==4){ + + //final_answer_tmp=simpleLevinstheinIteration(querylist, query); + final_query_tmp = simpleWordnetIteration(querylist, query); + for(String i: final_query_tmp){ + + //do it unnice for first + boolean double_query=false; + for(String s: final_query ){ + + if(s.contains(i)){ + double_query=true; + + } + } + if(double_query==false){ + + final_query.add(i); + } + } + } + final_answer_tmp=simpleWordnetIteration(querylist, query); //if(querylist.size()>4)final_answer=complexWordnetIteration(querylist, query); //for a test only use: - if(querylist.size()>4)final_answer_tmp=newIteration(querylist,query); - - for(String i : final_answer_tmp){ - final_answer.add(i); + if(querylist.size()>4){ + final_query_tmp=newIteration(querylist,query); + for(String i: final_query_tmp){ + boolean double_query=false; + for(String s: final_query ){ + if(s.contains(i)){ + double_query=true; + } + } + if(double_query==false){ + final_query.add(i); + } + } } } } + + /* + * Send Query to Server and get answers + */ + + for(String anfrage : final_query){ + String answer_tmp; + answer_tmp=sendServerQuestionRequest(anfrage); + //System.out.println("Antwort vom Server: "+answer_tmp); + if(!final_answer.contains(anfrage)) + final_answer.add("Begin:\n"+anfrage +"\n"+answer_tmp+" \n End"); + //final_answer.add("Begin:\n"+anfrage +"\n"+answer_tmp+" \n End"); + } + + BufferedReader in = null; String tmp=""; @@ -317,7 +397,7 @@ } System.out.println(question); - out = out.replace("@en","").replace("\"",""); + out = out.replace("@en","").replace("\"","").replace("^^<http://www.w3.org/2001/XMLSchema#int> ", ""); System.out.println(out); BufferedWriter outfile = new BufferedWriter( @@ -326,6 +406,10 @@ outfile.write(tmp+"\n"+question+" :\n"+out); outfile.close(); + long stopIterationTime = 
System.currentTimeMillis(); + System.out.println("The Questionparsing took "+ (endParsingTime-startParsingTime)+ " ms"); + System.out.println("The Iteration took "+ (stopIterationTime-startIterationTime)+ " ms"); + System.out.println("All took "+ (stopIterationTime-startParsingTime)+ " ms"); } private ArrayList<String> newIteration(ArrayList<String> querylist, String query) throws SQLException, @@ -450,6 +534,7 @@ String resource=""; String property_to_compare_with=""; String sideOfProperty="LEFT"; + ArrayList<String> new_queries= new ArrayList<String>(); int tmpcounter=0; @@ -487,7 +572,7 @@ properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); if (properties==null){ - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } @@ -495,18 +580,20 @@ } catch (IOException e) { - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } if(goOnAfterProperty==true){ - ArrayList<String> new_queries= new ArrayList<String>(); + String bestQuery=""; + double highestNLD=0; //iterate over properties for (Entry<String, String> entry : properties.entrySet()) { String key = entry.getKey(); key=key.replace("\"",""); key=key.replace("@en",""); String value = entry.getValue(); + // System.out.println("Key "+ key +" and value "+value); //compare property gotten from the resource with the property from the original query double nld=Levenshtein.nld(property_to_compare_with.toLowerCase(), key); @@ -520,32 +607,46 @@ replacement=replacement.replace("ontology", "property"); } querynew=querynew.replace(replacement,value); - System.out.println("Simple Levensthein Query: "+ querynew); + if(nld>highestNLD){ + bestQuery=querynew; + highestNLD=nld; + } + /* System.out.println("Simple Levensthein Query: "+ 
querynew); + new_queries.add(querynew);*/ + //only add, if nld is greater than the already existing nld. + new_queries.add(querynew); } + } - + - + /*new_queries.add(bestQuery); + System.out.println("Best Query "+bestQuery);*/ //add original query for iteration + new_queries.add(query); + //iterate over all Queries and get answer from Server - for(String anfrage : new_queries){ + /* for(String anfrage : new_queries){ String answer_tmp; answer_tmp=sendServerQuestionRequest(anfrage); System.out.println("Antwort vom Server: "+answer_tmp); final_answer.add("Begin:\n"+anfrage +"\n"+answer_tmp+" \n End"); - } + }*/ } - - return final_answer; + + //test to returnqueries, put them together and than send them to the server. + return new_queries; + //return final_answer; } private ArrayList<String> complexeLevinstheinIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { ArrayList<String> final_answer=new ArrayList<String>(); + ArrayList<String> new_queries= new ArrayList<String>(); String resourceOne=""; String property_to_compare_withOne=""; String resourceTwo=""; @@ -603,13 +704,13 @@ propertiesTwo=property.getPropertys(getUriFromIndex(resourceTwo.toLowerCase(),0),sideOfPropertyTwo); if (propertiesOne==null){ - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } } catch (IOException e) { - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } @@ -617,8 +718,8 @@ if(goOnAfterProperty==true){ - ArrayList<String> new_queries= new ArrayList<String>(); + //Iterate over property from resource one for (Entry<String, String> entryOne : propertiesOne.entrySet()) { @@ -672,17 +773,9 @@ //add original query for iteration new_queries.add(query); - //iterate over all Queries and get answer from 
Server - for(String anfrage : new_queries){ - String answer_tmp; - answer_tmp=sendServerQuestionRequest(anfrage); - System.out.println("Antwort vom Server: "+answer_tmp); - final_answer.add("Begin:\n"+anfrage +"\n"+answer_tmp+" \n End"); - } - } - return final_answer; + return new_queries; } @@ -769,7 +862,7 @@ System.out.println("tmp_semantics in Iteration: "+ tmp_semantics); if (tmp_semantics==null){ goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); } else{ @@ -782,7 +875,7 @@ } catch (IOException e) { goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); } @@ -832,6 +925,7 @@ private ArrayList<String> simpleWordnetIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { ArrayList<String> final_answer=new ArrayList<String>(); + ArrayList<String> new_queries= new ArrayList<String>(); System.out.println("In simpleWordnetIteration"); @@ -882,7 +976,7 @@ } if(goOnAfterProperty==true){ - ArrayList<String> new_queries= new ArrayList<String>(); + System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); ArrayList<String> semantics=new ArrayList<String>(); @@ -901,7 +995,11 @@ semantics.add(_temp_); tmp_semantics=semantics; } - + if(property_to_compare_with.contains("_")){ + String[] array=property_to_compare_with.split("_"); + for(String i : array) tmp_semantics.add(i); + tmp_semantics.add(property_to_compare_with.replace("_"," ")); + } System.out.println("tmp_semantics: "+ tmp_semantics); Boolean goOnAfterWordnet = true; @@ -918,7 +1016,7 @@ System.out.println("tmp_semantics in Iteration: "+ tmp_semantics); if 
(tmp_semantics==null){ goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); } else{ @@ -931,7 +1029,7 @@ } catch (IOException e) { goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); } @@ -943,8 +1041,9 @@ String key = entry.getKey(); key=key.replace("\"",""); key=key.replace("@en",""); + String value = entry.getValue(); - // System.out.println("Key propery: "+ key); + System.out.println("Key "+ key +" and value "+value); // System.out.println("Value propery: "+ value); for(String b : semantics){ @@ -960,6 +1059,8 @@ test=test.replace("ontology", "property"); } query_tmp=query_tmp.replace(test,value); + System.out.println("\n"); + System.out.println("Original Query: "+ query); System.out.println("Simple Wordnet Query: "+ query_tmp); System.out.println("\n"); new_queries.add(query_tmp); @@ -969,25 +1070,20 @@ } //add original query for iteration - new_queries.add(query); - //iterate over all Queries and get answer from Server - for(String bla : new_queries){ - String answer_tmp; - answer_tmp=sendServerQuestionRequest(bla); - System.out.println("Antwort vom Server: "+answer_tmp); - final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); - } + new_queries.add(query); + } } - return final_answer; + return new_queries; } private ArrayList<String> complexWordnetIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { ArrayList<String> final_answer=new ArrayList<String>(); + ArrayList<String> new_queries= new ArrayList<String>(); String resourceOne=""; String property_to_compare_withOne=""; @@ -1062,7 +1158,6 @@ * #################################### Semantics 
One############################################# */ - ArrayList<String> new_queries= new ArrayList<String>(); //System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); ArrayList<String> semanticsOne=new ArrayList<String>(); @@ -1089,7 +1184,7 @@ tmp_semanticsOne=getSemantics(tmp_semanticsOne); if (tmp_semanticsOne==null){ goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); } else{ @@ -1102,7 +1197,7 @@ } catch (IOException e) { goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); } @@ -1136,7 +1231,7 @@ tmp_semanticsTwo=getSemantics(tmp_semanticsTwo); if (tmp_semanticsTwo==null){ goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); } else{ @@ -1149,7 +1244,7 @@ } catch (IOException e) { goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); + System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); } @@ -1214,17 +1309,11 @@ //add original query for iteration new_queries.add(query); - //iterate over all Queries and get answer from Server - for(String bla : new_queries){ - String answer_tmp; - answer_tmp=sendServerQuestionRequest(bla); - System.out.println("Antwort vom Server: "+answer_tmp); - final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); - } + } } - return final_answer; + return new_queries; } @@ -1623,19 +1712,19 @@ 
List<String> array_relatedNouns=null; List<String> array_bestsynonyms=null; - System.out.println("Wordnet Word: "+id); + //System.out.println("Wordnet Word: "+id); try{ array_relatedNouns =wordnet.getRelatedNouns(id); } catch(Exception e){ //array_relatedNouns.clear(); } - System.out.println("array_relatedNouns: "+ array_relatedNouns); + //System.out.println("array_relatedNouns: "+ array_relatedNouns); //System.out.println("after relatedNouns"); try{ array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, id); - System.out.println("array_bestsynonyms: "+ array_bestsynonyms); + //System.out.println("array_bestsynonyms: "+ array_bestsynonyms); } catch(Exception e){ // @@ -1687,12 +1776,12 @@ catch(Exception e){ //array_relatedNouns.clear(); } - System.out.println("array_relatedNouns: "+ array_relatedNouns); + //System.out.println("array_relatedNouns: "+ array_relatedNouns); //System.out.println("after relatedNouns"); try{ array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, advanced_id); - System.out.println("array_bestsynonyms: "+ array_bestsynonyms); + // System.out.println("array_bestsynonyms: "+ array_bestsynonyms); } catch(Exception e){ // @@ -1729,6 +1818,10 @@ + /* + * http://purpurtentacle.techfak.uni-bielefeld.de:8893/sparql new endpoint + */ + private String sendServerQuestionRequest(String query){ //SPARQL-Endpoint of Semantic Computing Group This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-15 13:26:15
|
Revision: 3504 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3504&view=rev Author: lorenz_b Date: 2011-12-15 13:26:09 +0000 (Thu, 15 Dec 2011) Log Message: ----------- Fixed problem with superflous triple <A rdfs:subClassof A>. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-12-15 11:11:12 UTC (rev 3503) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-12-15 13:26:09 UTC (rev 3504) @@ -698,6 +698,7 @@ qs = rs.next(); superClasses.add(new NamedClass(qs.getResource("sup").getURI())); } + superClasses.remove(description); return superClasses; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-15 11:11:24
|
Revision: 3503 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3503&view=rev Author: sebastianwtr Date: 2011-12-15 11:11:12 +0000 (Thu, 15 Dec 2011) Log Message: ----------- [tbsl exploration] changed getQuery function and dependencies Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-13 11:58:38 UTC (rev 3502) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-15 11:11:12 UTC (rev 3503) @@ -111,6 +111,7 @@ //System.out.println(tmp_array[i-1].toLowerCase() + " " +tmp_array[i]); } + System.out.println("created Properties: "+hm); return hm; } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-13 11:58:38 UTC (rev 3502) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-15 11:11:12 UTC (rev 3503) @@ -62,8 +62,7 @@ System.out.println("Loading SPARQL Templator"); // btemplator = new BasicTemplator(); - //wenn ich das nehme, dann gebe ich dem parser den ideal.... 
- //btemplator.UNTAGGED_INPUT = false; + btemplator.UNTAGGED_INPUT = false; templator = new Templator(); System.out.println("Loading SPARQL Templator Done\n"); System.out.println("Start Indexing"); @@ -74,7 +73,7 @@ //normaly 1 setExplorationdepthwordnet(1); //eigentlich immer mit 0 initialisieren - setIterationdepth(1); + setIterationdepth(9); setNumberofanswers(1); } @@ -189,7 +188,7 @@ * ################################################################################################# */ //Iteration 0 - if(getIterationdepth()==0&&startIterating==true){ + if(getIterationdepth()==0&&startIterating==true||getIterationdepth()==9&&startIterating==true){ String tmp = new String(); String s = null; BufferedReader in = null; @@ -226,7 +225,7 @@ /* * Only Levensthein!!! */ - if(getIterationdepth()==1&&startIterating==true){ + if(getIterationdepth()==1&&startIterating==true||getIterationdepth()==9&&startIterating==true){ /* //4, because of query + three conditions for the simple case if(querylist.size()==4)final_answer=simpleIteration1Case(querylist, query); @@ -235,10 +234,16 @@ if(querylist.size()>4)final_answer=complexeIteration1Case(querylist, query); */ + ArrayList<String> final_answer_tmp = new ArrayList<String>(); + if(querylist.size()==4)final_answer_tmp=simpleLevinstheinIteration(querylist, query); + if(querylist.size()>4)final_answer_tmp=complexeLevinstheinIteration(querylist, query); - if(querylist.size()==4)final_answer=simpleLevinstheinIteration(querylist, query); - if(querylist.size()>4)final_answer=complexeLevinstheinIteration(querylist, query); + for(String i : final_answer_tmp){ + final_answer.add(i); + } + + } /* * ################################################################################################# @@ -247,9 +252,18 @@ /* * Only Wordnet!!! 
*/ - if(getIterationdepth()==2&&startIterating==true){ - if(querylist.size()==4)final_answer=simpleWordnetIteration(querylist, query); - if(querylist.size()>4)final_answer=complexWordnetIteration(querylist, query); + if(getIterationdepth()==2&&startIterating==true||getIterationdepth()==9&&startIterating==true){ + ArrayList<String> final_answer_tmp = new ArrayList<String>(); + + if(querylist.size()==4)final_answer_tmp=simpleWordnetIteration(querylist, query); + //if(querylist.size()>4)final_answer=complexWordnetIteration(querylist, query); + + //for a test only use: + if(querylist.size()>4)final_answer_tmp=newIteration(querylist,query); + + for(String i : final_answer_tmp){ + final_answer.add(i); + } } @@ -281,8 +295,9 @@ String out=""; for(String answer : final_answer){ + if(answer!=null){ //only answered question - //if(!answer.contains("Error in searching Wordnet with word") && !answer.contains("EmtyAnswer")&& !answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; + if(!answer.contains("Error in searching Wordnet with word") && !answer.contains("EmtyAnswer")&& !answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; /* //only questions with wordnet error @@ -294,11 +309,17 @@ //only questions with Error in Properties // if(answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; - out+= "\n"+answer+"\n"; + //out+= "\n"+answer+"\n"; + } + else{ + System.out.println("Answer was null"); + } } System.out.println(question); + out = out.replace("@en","").replace("\"",""); System.out.println(out); + BufferedWriter outfile = new BufferedWriter( new OutputStreamWriter( new FileOutputStream( "/tmp/answer" ) ) ); @@ -307,6 +328,120 @@ outfile.close(); } + private ArrayList<String> newIteration(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + //only for special case, that the first condition has a resource + ArrayList<String> final_answer=new ArrayList<String>(); + String 
firstResource=""; + String firstProperty=""; + String secondProperty=null; + String sideOfProperty=null; + String sideOfPropertyTwo=null; + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1&&tmpcounter<=4){ + if(s.contains("LEFT")){ + sideOfProperty="LEFT"; + firstResource=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfProperty="RIGHT"; + firstResource=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + firstProperty=s.replace("PROPERTY",""); + } + + } + if(tmpcounter>4){ + if(s.contains("LEFT")){ + sideOfPropertyTwo="LEFT"; + } + if(s.contains("RIGHT")){ + sideOfPropertyTwo="RIGHT"; + } + if(s.contains("PROPERTY")){ + secondProperty=s.replace("PROPERTY",""); + } + + } + + } + //first create Query and get the URI's + String firstquery=""; + if(sideOfProperty=="RIGHT"){ + firstquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(firstResource.toLowerCase(),0)+"> <"+getUriFromIndex(firstProperty.toLowerCase(),1) +"> ?y}"; + } + if(sideOfProperty=="RIGHT"){ + firstquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(firstProperty.toLowerCase(),1)+"> <"+getUriFromIndex(firstResource.toLowerCase(),0) +"> ?y}"; + + } + + //first try without iterating over wordnet and levensthein + ArrayList<String> answer_tmp=new ArrayList<String>(); + answer_tmp=sendServerQuestionRequestArray(firstquery); + + //if answer_tmp is emty try to iterate in this case with wordnet + ArrayList<String>querylist_new=new ArrayList<String>(); + querylist_new.add(firstquery); + querylist_new.add("PROPERTY"+firstProperty); + querylist_new.add(sideOfProperty+firstResource); + if(answer_tmp.isEmpty()){ + answer_tmp=simpleWordnetIterationArray(querylist_new,firstquery); + } + //if answer_tmp is still empty return null and exit function + if(answer_tmp.isEmpty()){final_answer.add("new 
Iteration didnt work"); + + return final_answer; + } + + ArrayList<ArrayList<String>>secondquerylist=new ArrayList<ArrayList<String>>(); + + //we have now the uri's for the second query and the result answers + //create now for every entry, if it contains something like http an new query + for(String s : answer_tmp){ + System.out.println("!!!!!!!!!!!!!"); + System.out.println("URI found: "+ s); + System.out.println("!!!!!!!!!!!!!"); + String secondquery =""; + ArrayList<String> tmp = new ArrayList<String>(); + if(s.contains("http:")){ + if(sideOfPropertyTwo=="RIGHT"){ + secondquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(s.toLowerCase(),0)+"> <"+getUriFromIndex(secondProperty.toLowerCase(),1) +"> ?y}"; + tmp.add(secondquery); + tmp.add("PROPERTY"+secondProperty); + querylist_new.add(sideOfPropertyTwo+s); + secondquerylist.add(tmp); + } + if(sideOfPropertyTwo=="RIGHT"){ + secondquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(secondProperty.toLowerCase(),1)+"> <"+getUriFromIndex(s.toLowerCase(),0) +"> ?y}"; + tmp.add(secondquery); + tmp.add("PROPERTY"+secondProperty); + querylist_new.add(sideOfPropertyTwo+s); + secondquerylist.add(tmp); + } + + } + } + + + //TODO: Check this part of the function!!! 
+ for(ArrayList as: secondquerylist){ + ArrayList<String> answer_tmp_two=new ArrayList<String>(); + //answer_tmp_two=sendServerQuestionRequestArray(s); + answer_tmp=simpleWordnetIterationArray(as,as.get(0).toString()); + for(String t :answer_tmp_two){ + final_answer.add(t); + System.out.println("Answer from advanced Iteration: "+ t); + } + } + if(final_answer.isEmpty())final_answer.add("new Iteration didnt work"); + System.out.println("Returning the function"); + return final_answer; + + } private ArrayList<String> simpleLevinstheinIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { @@ -335,6 +470,7 @@ } } + } System.out.println("Property to compare:: "+ property_to_compare_with); System.out.println("Resource: "+ resource); @@ -368,7 +504,10 @@ //iterate over properties for (Entry<String, String> entry : properties.entrySet()) { String key = entry.getKey(); + key=key.replace("\"",""); + key=key.replace("@en",""); String value = entry.getValue(); + //compare property gotten from the resource with the property from the original query double nld=Levenshtein.nld(property_to_compare_with.toLowerCase(), key); @@ -377,6 +516,9 @@ //if its so, replace old uri with the new one String querynew=query; String replacement = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + if(!querynew.contains(replacement)){ + replacement=replacement.replace("ontology", "property"); + } querynew=querynew.replace(replacement,value); System.out.println("Simple Levensthein Query: "+ querynew); new_queries.add(querynew); @@ -384,6 +526,9 @@ } + + //add original query for iteration + new_queries.add(query); //iterate over all Queries and get answer from Server for(String anfrage : new_queries){ String answer_tmp; @@ -479,6 +624,8 @@ String queryOne=query; String keyOne = entryOne.getKey(); + keyOne=keyOne.replace("\"",""); + keyOne=keyOne.replace("@en",""); String valueOne = entryOne.getValue(); @@ -488,33 +635,43 @@ * and use that new query, for the 
property of the second resource */ if(levnstheinDistanzeOne>=LevenstheinMin){ - String test = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); - queryOne=queryOne.replace(test,valueOne); - } + String replacementOne = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); + if(!queryOne.contains(replacementOne)){ + replacementOne=replacementOne.replace("ontology", "property"); + } + queryOne=queryOne.replace(replacementOne,valueOne); + - /* - * Iterate now over the second set of properties, but this time not using the original query in which - * to replace the old uri with the new one, but using queryOne from the first step. - */ - for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { - String keyTwo = entryTwo.getKey(); - String valueTwo = entryTwo.getValue(); - - //again calculate the nld with the property from the second condition and the property from the propertyset - double levnstheinDistanzeTwo=Levenshtein.nld(property_to_compare_withTwo.toLowerCase(), keyTwo); - - if(levnstheinDistanzeTwo>LevenstheinMin){ - String queryTwo=queryOne; - String replacement = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); - queryTwo=queryTwo.replace(replacement,valueTwo); - System.out.println("Complex Levensthein Query: "+ queryTwo); - new_queries.add(queryTwo); - } - + /* + * Iterate now over the second set of properties, but this time not using the original query in which + * to replace the old uri with the new one, but using queryOne from the first step. 
+ */ + for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { + String keyTwo = entryTwo.getKey(); + String valueTwo = entryTwo.getValue(); + keyTwo=keyTwo.replace("\"",""); + keyTwo=keyTwo.replace("@en",""); + + //again calculate the nld with the property from the second condition and the property from the propertyset + double levnstheinDistanzeTwo=Levenshtein.nld(property_to_compare_withTwo.toLowerCase(), keyTwo); + + if(levnstheinDistanzeTwo>LevenstheinMin){ + String queryTwo=queryOne; + String replacement = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); + if(!queryTwo.contains(replacement)){ + replacement=replacement.replace("ontology", "property"); + } + queryTwo=queryTwo.replace(replacement,valueTwo); + System.out.println("Complex Levensthein Query: "+ queryTwo); + new_queries.add(queryTwo); + } + + } } } - + //add original query for iteration + new_queries.add(query); //iterate over all Queries and get answer from Server for(String anfrage : new_queries){ String answer_tmp; @@ -527,6 +684,150 @@ return final_answer; } + + +private ArrayList<String> simpleWordnetIterationArray(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + ArrayList<String> final_answer=new ArrayList<String>(); + + System.out.println("In simpleWordnetIteration"); + + String resource=""; + String property_to_compare_with=""; + String sideOfProperty="LEFT"; + + + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1){ + if(s.contains("LEFT")){ + sideOfProperty="LEFT"; + resource=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfProperty="RIGHT"; + resource=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_with=s.replace("PROPERTY",""); + } + + } + } + System.out.println("Property to compare:: "+ property_to_compare_with); + System.out.println("Resource: "+ resource); + + + 
HashMap<String,String> properties = new HashMap<String, String>(); + GetRessourcePropertys property = new GetRessourcePropertys(); + Boolean goOnAfterProperty = true; + try { + properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); + if (properties==null){ + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + + } catch (IOException e) { + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + + } + if(goOnAfterProperty==true){ + + ArrayList<String> new_queries= new ArrayList<String>(); + + System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); + ArrayList<String> semantics=new ArrayList<String>(); + ArrayList<String> tmp_semantics=new ArrayList<String>(); + ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); + semantics.add(property_to_compare_with); + System.out.println("Semantics: "+ semantics); + + //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child + String _temp_=myindex.getWordnetHelp(property_to_compare_with); + if(_temp_==null){ + tmp_semantics=semantics; + } + else{ + semantics.clear(); + semantics.add(_temp_); + tmp_semantics=semantics; + } + + System.out.println("tmp_semantics: "+ tmp_semantics); + Boolean goOnAfterWordnet = true; + + for(int i=0;i<=explorationdepthwordnet;i++){ + + try { + tmp_semantics=getSemantics(tmp_semantics); + System.out.println("tmp_semantics in Iteration: "+ tmp_semantics); + if (tmp_semantics==null){ + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + else{ + //each word only one time + for(String k : tmp_semantics){ + if(!semantics.contains(k)) semantics.add(k); + } + } + + } catch (IOException e) { + + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + + } + + if(goOnAfterWordnet==true){ + + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + key=key.replace("\"",""); + key=key.replace("@en",""); + + for(String b : semantics){ + if(key.contains(b.toLowerCase())){ + if(!result_SemanticsMatchProperties.contains(key)){ + result_SemanticsMatchProperties.add(key); + String query_tmp=query; + String replacement = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + if(!query_tmp.contains(replacement)){ + replacement=replacement.replace("ontology", "property"); + } + query_tmp=query_tmp.replace(replacement,value); + System.out.println("Simple Wordnet Query: "+ query_tmp); + new_queries.add(query_tmp); + } + } + } + } + + //add original query for iteration + new_queries.add(query); + //iterate over all Queries and get answer from Server + for(String bla : new_queries){ + ArrayList<String>answer_tmp=new ArrayList<String>(); + answer_tmp=sendServerQuestionRequestArray(bla); + for(String s: 
answer_tmp)final_answer.add(s); + //final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); + } + } + } + + + return final_answer; +} private ArrayList<String> simpleWordnetIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { @@ -637,29 +938,38 @@ } if(goOnAfterWordnet==true){ - + System.out.println("in actual wordnet function"); for (Entry<String, String> entry : properties.entrySet()) { String key = entry.getKey(); + key=key.replace("\"",""); + key=key.replace("@en",""); String value = entry.getValue(); + // System.out.println("Key propery: "+ key); + // System.out.println("Value propery: "+ value); for(String b : semantics){ if(key.contains(b.toLowerCase())){ - System.out.println("Hey, Iam in too!!!!!!!!!!!"); //to check, if no property is used twice... if(!result_SemanticsMatchProperties.contains(key)){ - //create new query - System.out.println("Hey, Iam in!!!!!!!!!!!"); result_SemanticsMatchProperties.add(key); String query_tmp=query; String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + + //could happen, that there is an ontology and not a property or upsidedown in the query + if(!query_tmp.contains(test)){ + test=test.replace("ontology", "property"); + } query_tmp=query_tmp.replace(test,value); System.out.println("Simple Wordnet Query: "+ query_tmp); + System.out.println("\n"); new_queries.add(query_tmp); } } } } + //add original query for iteration + new_queries.add(query); //iterate over all Queries and get answer from Server for(String bla : new_queries){ String answer_tmp; @@ -855,6 +1165,8 @@ String keyOne = entryOne.getKey(); String valueOne = entryOne.getValue(); String queryOne=query; + keyOne=keyOne.replace("\"",""); + keyOne=keyOne.replace("@en",""); for(String b : semanticsOne){ if(keyOne.contains(b.toLowerCase())){ @@ -862,11 +1174,16 @@ //create new query result_SemanticsMatchPropertiesOne.add(keyOne); String replacementOne = 
getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); + if(!queryOne.contains(replacementOne)){ + replacementOne=replacementOne.replace("ontology", "property"); + } queryOne=queryOne.replace(replacementOne,valueOne); for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { String keyTwo = entryTwo.getKey(); String valueTwo = entryTwo.getValue(); + keyTwo=keyTwo.replace("\"",""); + keyTwo=keyTwo.replace("@en",""); for(String z : semanticsTwo){ if(keyTwo.contains(z.toLowerCase())){ @@ -875,6 +1192,9 @@ result_SemanticsMatchPropertiesTwo.add(keyTwo); String queryTwo=queryOne; String replacementTwo = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); + if(!queryTwo.contains(replacementTwo)){ + replacementTwo=replacementTwo.replace("ontology", "property"); + } queryTwo=queryTwo.replace(replacementTwo,valueTwo); System.out.println("Complexe Wordnet Query: "+ queryTwo); new_queries.add(queryTwo); @@ -892,7 +1212,8 @@ } - + //add original query for iteration + new_queries.add(query); //iterate over all Queries and get answer from Server for(String bla : new_queries){ String answer_tmp; @@ -1042,75 +1363,107 @@ //see below slotcounter=slotcounter+1; + //resource will be detectet. + //If its not a resource, it has to be a property! 
+ String resource=""; + String property=""; + String slotstring=""; + if(slot.toString().contains("RESOURCE")){ + resource=slot.toString().replace("{","").replace("}","").replace(" RESOURCE ", ""); + System.out.println("Found Resource in getQuery: "+ resource); + } + else{ + property=slot.toString().replace("UNSPEC","").replace("RESOURCE","").replace("{","").replace("}","").replace(" PROPERTY ",""); + System.out.println("Found Property in getQuery: "+ property); + } - String slotstring=slot.toString().replace("UNSPEC","").replace("RESOURCE","").replace("{","").replace("}",""); - slotstring=slotstring.replace(" ",""); - //System.out.println(tmp); - //damit auch wirklich nur ?y und nicht ?y0 ersetzt wird, einfach nach "?y " suchen. - String[] array = slotstring.split(":"); - String replace; - if(array[0].length()<2)replace = "?"+array[0]+" "; - else replace="?"+array[0]; - - //TODO: Hotfix: get rid of " PROPERTY " - String _ThingGettingURIfor_=array[1]; - _ThingGettingURIfor_=_ThingGettingURIfor_.replace(" PROPERTY ","").toLowerCase(); - String hm_result=getUriFromIndex(_ThingGettingURIfor_,0); - try - { - if(hm_result.contains("Category:")) hm_result=hm_result.replace("Category:",""); - } - catch ( Exception e ) - { - - } + //query=query.replace(replace, "<"+hm_result+">"); - /*always the middle slot is the property - * so count and always take the second of third to become a property - */ - if(slotcounter%2==0){ - hm_result=getUriFromIndex(_ThingGettingURIfor_,1); - } - //set back to 0 to start new - if(slotcounter==3) slotcounter=0; - query=query.replace(replace, "<"+hm_result+">"); - query_upside_down=query_upside_down.replace(replace, "<"+hm_result+">"); + if(resource!=""){ + String replace=""; + String[] array = resource.split(":"); + if(array[0].length()<2)replace = "?"+array[0]+" "; + else replace="?"+array[0]; + + String hm_result=getUriFromIndex(array[1],0); + //System.out.print("URI for_ThingGettingURIfor: "+hm_result); + try + { + 
if(hm_result.contains("Category:")) hm_result=hm_result.replace("Category:",""); + } + catch ( Exception e ) + { + + } + + query=query.replace(replace, "<"+hm_result+">"); + //System.out.println("Query: "+query); + query_upside_down=query_upside_down.replace(replace, "<"+hm_result+">"); + //System.out.println("Query Up Side Down: "+query_upside_down); + + } + if(property!=""){ + String replace=""; + String[] array = property.split(":"); + if(array[0].length()<2)replace = "?"+array[0]+" "; + else replace="?"+array[0]; + + String hm_result=getUriFromIndex(array[1],1); + + query=query.replace(replace, "<"+hm_result+">"); + //System.out.println("Query: "+query); + query_upside_down=query_upside_down.replace(replace, "<"+hm_result+">"); + //System.out.println("Query Up Side Down: "+query_upside_down); + + } + } + query_upside_down=query_upside_down.replace("><","> <").replace(">?", "> ?"); + query=query.replace("><","> <").replace(">?", "> ?"); lstquerupsidedown.add(query_upside_down); lstquerynew.add(query); + System.out.println("Query: "+query); + System.out.println("Query Up Side Down: "+query_upside_down); ArrayList<String> lsttmp=createLeftAndRightPropertyArray(query); //if its lower than three, we dont have any conditions and dont need to check it. 
//also if the size%3 isnt 0, than something else is wrong and we dont need to test the query - if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerynew.add(i); + if(lsttmp.size()>=3&&lsttmp.size()%3==0){ + for(String i : lsttmp) lstquerynew.add(i.replace("__","")); + lstquery.add(lstquerynew); + } else{ lstquerynew.clear(); lstquerynew.add("ERROR"); + System.out.println("ERROR1"); addQuery=false; } lsttmp.clear(); lsttmp=createLeftAndRightPropertyArray(query_upside_down); - if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerupsidedown.add(i); + if(lsttmp.size()>=3&&lsttmp.size()%3==0){ + for(String i : lsttmp) lstquerupsidedown.add(i.replace("__","")); + lstquery.add(lstquerupsidedown); + } else{ lstquerupsidedown.clear(); lstquerupsidedown.add("ERROR"); + System.out.println("ERROR2"); addQuery=false; } - if(addQuery==true){ - lstquery.add(lstquerynew); - lstquery.add(lstquerupsidedown); - } + + + System.out.println("Add Query: "+addQuery); } } - + System.out.println("List of Query: "+lstquery); return lstquery; } @@ -1159,6 +1512,10 @@ * @throws SQLException */ private String getUriFromIndex(String string, int fall) throws SQLException{ + String originalString=string; + string=string.replace("_", " "); + string=string.replace("-", " "); + string=string.replace(".", " "); String result=null; String tmp1=null; String tmp2 = null; @@ -1169,15 +1526,15 @@ //first try: take always the ontology if existing and not the Resource tmp1=myindex.getResourceURI(string.toLowerCase()); tmp2=myindex.getontologyClassURI(string.toLowerCase()); - System.out.println("URI from resource: "+tmp1); - System.out.println("URI from ontologyClass: "+tmp2); + /*System.out.println("URI from resource: "+tmp1); + System.out.println("URI from ontologyClass: "+tmp2);*/ - System.out.println("value from http://dbpedia.org/resource/WikiLeaks : "+ myindex.getResourceURI("http://dbpedia.org/resource/WikiLeaks")); - System.out.println("value from author : "+ 
myindex.getResourceURI("author")); + if(tmp1!=null && tmp2!=null) result=tmp2; if(tmp1!=null && tmp2==null) result=tmp1; if(tmp1==null && tmp2!=null) result=tmp2; + //result=myindex.getResourceURI(string.toLowerCase()); if(result==null)result=myindex.getPropertyURI(string.toLowerCase()); } @@ -1194,12 +1551,25 @@ } String tmp=""; - tmp=string.toLowerCase(); + tmp=originalString.toLowerCase(); tmp=tmp.replace("property",""); tmp=tmp.replace(" ", "_"); if(result==null) { if(fall==1)return "http://dbpedia.org/property/"+tmp; - if(fall==0)return "http://dbpedia.org/resource/"+tmp; + if(fall==0) { + String bla ="http://dbpedia.org/resource/"+tmp; + if(tmp.contains("_")){ + String[] newarraytmp=tmp.split("_"); + String tmpneu=""; + for(String s :newarraytmp){ + tmpneu+= "_"+ Character.toUpperCase(s.charAt(0)) + s.substring(1); + } + tmpneu=tmpneu.replaceFirst("_", ""); + bla ="http://dbpedia.org/resource/"+tmpneu; + System.out.println("Hotfix: "+bla); + } + return bla; + } else{ System.out.println("return result: "+result); return result; @@ -1243,49 +1613,54 @@ } }*/ - for(String id :semantics){ - //System.out.println("in String id : semantics"); - //System.out.println("ID :"+id); - - //add id also to the result, if its not already in there - if(!result.contains(id))result.add(id); - List<String> array_relatedNouns=null; - List<String> array_bestsynonyms=null; - - System.out.println("Wordnet Word: "+id); - try{ - array_relatedNouns =wordnet.getRelatedNouns(id); - } - catch(Exception e){ - //array_relatedNouns.clear(); - } - System.out.println("array_relatedNouns: "+ array_relatedNouns); - //System.out.println("after relatedNouns"); - - try{ - array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, id); - System.out.println("array_bestsynonyms: "+ array_bestsynonyms); - } - catch(Exception e){ - // - } - + try{ + for(String id :semantics){ + //System.out.println("in String id : semantics"); + //System.out.println("ID :"+id); + //add id also to the result, if its not already 
in there + if(!result.contains(id))result.add(id); + List<String> array_relatedNouns=null; + List<String> array_bestsynonyms=null; - if(array_relatedNouns!=null){ - for(String i:array_relatedNouns){ - if(!result.contains(i))result.add(i); + System.out.println("Wordnet Word: "+id); + try{ + array_relatedNouns =wordnet.getRelatedNouns(id); } - } - if(array_bestsynonyms!=null){ - for(String i:array_bestsynonyms){ - if(!result.contains(i))result.add(i); + catch(Exception e){ + //array_relatedNouns.clear(); } + System.out.println("array_relatedNouns: "+ array_relatedNouns); + //System.out.println("after relatedNouns"); + + try{ + array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, id); + System.out.println("array_bestsynonyms: "+ array_bestsynonyms); + } + catch(Exception e){ + // + } + + + + if(array_relatedNouns!=null){ + for(String i:array_relatedNouns){ + if(!result.contains(i))result.add(i); + } + } + if(array_bestsynonyms!=null){ + for(String i:array_bestsynonyms){ + if(!result.contains(i))result.add(i); + } + } + + + } - - - } + catch(Exception e){ + return null; + } if(!result.isEmpty()) return result; else{ @@ -1356,6 +1731,8 @@ private String sendServerQuestionRequest(String query){ //SPARQL-Endpoint of Semantic Computing Group + + //5171 String tmp="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest(query)+"&format=text%2Fhtml&debug=on&timeout="; System.out.println(tmp); URL url; @@ -1389,6 +1766,40 @@ return createAnswer(result); } + private ArrayList<String> sendServerQuestionRequestArray(String query){ + //SPARQL-Endpoint of Semantic Computing Group + String tmp="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest(query)+"&format=text%2Fhtml&debug=on&timeout="; + System.out.println(tmp); + URL url; + InputStream is; + InputStreamReader isr; + BufferedReader r; + String str=""; + String result=""; + + try { + url = new URL(tmp); + is = 
url.openStream(); + isr = new InputStreamReader(is); + r = new BufferedReader(isr); + int counter=0; + do { + str = r.readLine(); + if (str != null){ + result=result.concat(str); + counter=counter+1;} + } while (str != null); + + } catch (MalformedURLException e) { + System.out.println("Must enter a valid URL"); + } catch (IOException e) { + System.out.println("Can not connect"); + } + + + + return createAnswerArray(result); + } private String createAnswer(String string){ //<td>Klaus Wowereit</td> @@ -1409,6 +1820,25 @@ return result; } + private ArrayList<String> createAnswerArray(String string){ + //<td>Klaus Wowereit</td> + + //get with regex all between <td> </td> + + Pattern p = Pattern.compile (".*<td>(.*)</td>.*"); + Matcher m = p.matcher (string); + ArrayList<String> result=new ArrayList<String>(); + + while (m.find()) { + if(m.group(1)!=null) + result.add(m.group(1)); + } + + //if (result.length()==0) result="EmtyAnswer"; + + return result; + + } private String createServerRequest(String query){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-13 11:58:47
|
Revision: 3502 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3502&view=rev Author: lorenz_b Date: 2011-12-13 11:58:38 +0000 (Tue, 13 Dec 2011) Log Message: ----------- Made global enrichment script multi-threaded. Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-13 11:57:29 UTC (rev 3501) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-13 11:58:38 UTC (rev 3502) @@ -278,8 +278,8 @@ SPARQLTasks st = new SPARQLTasks(se); //check if endpoint supports SPARQL 1.1 - boolean supportsSPARQL_1_1 = st.supportsSPARQL_1_1(); - ks.setSupportsSPARQL_1_1(supportsSPARQL_1_1); +// boolean supportsSPARQL_1_1 = st.supportsSPARQL_1_1(); +// ks.setSupportsSPARQL_1_1(supportsSPARQL_1_1); if(useInference){ reasoner = new SPARQLReasoner(ks); Modified: trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-13 11:57:29 UTC (rev 3501) +++ trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-13 11:58:38 UTC (rev 3502) @@ -32,9 +32,10 @@ import java.util.Map; import java.util.Map.Entry; import java.util.TreeSet; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; -import javax.xml.ws.http.HTTPException; - import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; @@ -70,6 +71,15 @@ // directory for generated schemata private static String baseDir = "log/lod-enriched/"; + + //parameters for thread pool + 
//Parallel running Threads(Executor) on System + private static int corePoolSize = 10; + //Maximum Threads allowed in Pool + private static int maximumPoolSize = 20; + //Keep alive time for waiting threads for jobs(Runnable) + private static long keepAliveTime = 10; + /** * @param args * @throws MalformedURLException @@ -129,58 +139,91 @@ TreeSet<String> blacklist = new TreeSet<String>(); blacklist.add("rkb-explorer-crime"); // computation never completes + ArrayBlockingQueue<Runnable> workQueue = new ArrayBlockingQueue<Runnable>(endpoints.size()); + ThreadPoolExecutor threadPool = new ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, TimeUnit.SECONDS, workQueue); + + // perform enrichment on endpoints - for(Entry<String,SparqlEndpoint> endpoint : endpoints.entrySet()) { - // run enrichment - SparqlEndpoint se = endpoint.getValue(); - String name = endpoint.getKey(); + for(final Entry<String,SparqlEndpoint> endpoint : endpoints.entrySet()) { - File f = new File(baseDir + name + ".ttl"); - File log = new File(baseDir + name + ".log"); + threadPool.execute(new Runnable() { + + @Override + public void run() { + // run enrichment + SparqlEndpoint se = endpoint.getValue(); + String name = endpoint.getKey(); + + File f = new File(baseDir + name + ".ttl"); + File log = new File(baseDir + name + ".log"); + + System.out.println("Enriching " + name + " using " + se.getURL()); + Enrichment e = new Enrichment(se, null, threshold, nrOfAxiomsToLearn, useInference, false); + + e.maxEntitiesPerType = 3; // hack for faster testing of endpoints + +// if(blacklist.contains(name)) { +// continue; +// } + + boolean success = false; + // run enrichment script - we make a case distinguish to see which kind of problems we get + // (could be interesting for statistics later on) + try { + e.start(); + success = true; + } catch(StackOverflowError error) { + try { + error.printStackTrace(new PrintStream(log)); + } catch (FileNotFoundException e1) { + // TODO Auto-generated 
catch block + e1.printStackTrace(); + } + Files.appendToFile(log, "stack overflows could be caused by cycles in class hierarchies"); + error.printStackTrace(); + } catch(ResultSetException ex) { + try { + ex.printStackTrace(new PrintStream(log)); + } catch (FileNotFoundException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + Files.appendToFile(log, ex.getMessage()); + ex.printStackTrace(); + } catch(QueryExceptionHTTP ex) { + try { + ex.printStackTrace(new PrintStream(log)); + } catch (FileNotFoundException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + Files.appendToFile(log, ex.getMessage()); + ex.printStackTrace(); + } + catch(Exception ex) { + System.out.println("class of exception: " + ex.getClass()); + } + + // save results to a file (TODO: check if enrichment format + if(success) { + SparqlEndpointKS ks = new SparqlEndpointKS(se); + List<AlgorithmRun> runs = e.getAlgorithmRuns(); + List<OWLAxiom> axioms = new LinkedList<OWLAxiom>(); + for(AlgorithmRun run : runs) { + axioms.addAll(e.toRDF(run.getAxioms(), run.getAlgorithm(), run.getParameters(), ks)); + } + Model model = e.getModel(axioms); + try { + model.write(new FileOutputStream(f), "TURTLE"); + } catch (FileNotFoundException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } + + } + }); - System.out.println("Enriching " + name + " using " + se); - Enrichment e = new Enrichment(se, null, threshold, nrOfAxiomsToLearn, useInference, false); - - e.maxEntitiesPerType = 3; // hack for faster testing of endpoints - - if(blacklist.contains(name)) { - continue; - } - - boolean success = false; - // run enrichment script - we make a case distinguish to see which kind of problems we get - // (could be interesting for statistics later on) - try { - e.start(); - success = true; - } catch(StackOverflowError error) { - error.printStackTrace(new PrintStream(log)); - Files.appendToFile(log, "stack overflows could be caused by cycles in class 
hierarchies"); - error.printStackTrace(); - } catch(ResultSetException ex) { - ex.printStackTrace(new PrintStream(log)); - Files.appendToFile(log, ex.getMessage()); - ex.printStackTrace(); - } catch(QueryExceptionHTTP ex) { - ex.printStackTrace(new PrintStream(log)); - Files.appendToFile(log, ex.getMessage()); - ex.printStackTrace(); - } -// catch(Exception ex) { -// System.out.println("class of exception: " + ex.getClass()); -// } - - // save results to a file (TODO: check if enrichment format - if(success) { - SparqlEndpointKS ks = new SparqlEndpointKS(se); - List<AlgorithmRun> runs = e.getAlgorithmRuns(); - List<OWLAxiom> axioms = new LinkedList<OWLAxiom>(); - for(AlgorithmRun run : runs) { - axioms.addAll(e.toRDF(run.getAxioms(), run.getAlgorithm(), run.getParameters(), ks)); - } - Model model = e.getModel(axioms); - model.write(new FileOutputStream(f), "TURTLE"); - } } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-13 11:57:39
|
Revision: 3501 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3501&view=rev Author: lorenz_b Date: 2011-12-13 11:57:29 +0000 (Tue, 13 Dec 2011) Log Message: ----------- Updated chunk of algorithms to work with SPARQL endpoints, which not support COUNT queries. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -350,7 +350,7 @@ SortedSet<Description> mostGeneralClasses = reasoner.getClassHierarchy().getMostGeneralClasses(); } for(NamedClass cls : completeDisjointclasses){ - if(useClassPopularity){ + if(useClassPopularity && ks.supportsSPARQL_1_1()){ int popularity = reasoner.getIndividualsCount(cls); //we skip classes with no instances if(popularity == 0) 
continue; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -42,6 +42,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; @ComponentAnn(name="disjoint dataproperty axiom learner", shortName="dpldisjoint", version=0.1) public class DisjointDataPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -51,6 +53,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=DataPropertyEditor.class) private DatatypeProperty propertyToDescribe; + private Set<DatatypeProperty> allDataProperties; + public DisjointDataPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; } @@ -73,45 +77,94 @@ //TODO //at first get all existing dataproperties in knowledgebase - Set<DatatypeProperty> dataProperties = new SPARQLTasks(ks.getEndpoint()).getAllDataProperties(); + allDataProperties = new SPARQLTasks(ks.getEndpoint()).getAllDataProperties(); + allDataProperties.remove(propertyToDescribe); + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s ?p ?o.} WHERE {?s <%s> ?o. 
?s ?p ?o.} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ + model.add(newModel); + query = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.} GROUP BY ?p"; + + DatatypeProperty prop; + Integer oldCnt; + ResultSet rs = executeSelectQuery(query, model); + QuerySolution qs; + while(rs.hasNext()){ + qs = rs.next(); + prop = new DatatypeProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result, allDataProperties); + } + + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + + } + + private void runSPARQL1_1_Mode() { //get properties and how often they occur - int limit = 1000; - int offset = 0; - String queryTemplate = "SELECT ?p (COUNT(?s) as ?count) WHERE {?s ?p ?o." 
+ - "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + - "}"; - String query; - Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); - DatatypeProperty prop; - Integer oldCnt; - boolean repeat = true; - - ResultSet rs = null; - while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, propertyToDescribe, limit, offset); - rs = executeSelectQuery(query); - QuerySolution qs; - repeat = false; - while(rs.hasNext()){ - qs = rs.next(); - prop = new DatatypeProperty(qs.getResource("p").getURI()); - int newCnt = qs.getLiteral("count").getInt(); - oldCnt = result.get(prop); - if(oldCnt == null){ - oldCnt = Integer.valueOf(newCnt); - } - result.put(prop, oldCnt); - qs.getLiteral("count").getInt(); - repeat = true; - } - if(!result.isEmpty()){ - currentlyBestAxioms = buildAxioms(result, dataProperties); - offset += 1000; - } + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?p (COUNT(?s) as ?count) WHERE {?s ?p ?o." 
+ + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + + "}"; + String query; + Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); + DatatypeProperty prop; + Integer oldCnt; + boolean repeat = true; + + ResultSet rs = null; + while(!terminationCriteriaSatisfied() && repeat){ + query = String.format(queryTemplate, propertyToDescribe, limit, offset); + rs = executeSelectQuery(query); + QuerySolution qs; + repeat = false; + while(rs.hasNext()){ + qs = rs.next(); + prop = new DatatypeProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + repeat = true; + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result, allDataProperties); + offset += 1000; + } + } - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } @Override Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -36,6 +36,7 @@ import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ObjectPropertyEditor; import org.dllearner.core.owl.DisjointObjectPropertyAxiom; +import org.dllearner.core.owl.FunctionalObjectPropertyAxiom; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SPARQLTasks; @@ -47,6 +48,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; 
+import com.hp.hpl.jena.rdf.model.ModelFactory; @ComponentAnn(name="disjoint objectproperty axiom learner", shortName="opldisjoint", version=0.1) public class DisjointObjectPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -56,6 +59,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + private Set<ObjectProperty> allObjectProperties; + private boolean usePropertyPopularity = true; public DisjointObjectPropertyAxiomLearner(SparqlEndpointKS ks){ @@ -77,48 +82,96 @@ fetchedRows = 0; currentlyBestAxioms = new ArrayList<EvaluatedAxiom>(); - //TODO + //TODO detect existing axioms //at first get all existing objectproperties in knowledgebase - Set<ObjectProperty> objectProperties = new SPARQLTasks(ks.getEndpoint()).getAllObjectProperties(); - objectProperties.remove(propertyToDescribe); + allObjectProperties = new SPARQLTasks(ks.getEndpoint()).getAllObjectProperties(); + allObjectProperties.remove(propertyToDescribe); + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s ?p ?o.} WHERE {?s <%s> ?o. 
?s ?p ?o.} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ + model.add(newModel); + query = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.} GROUP BY ?p"; + + ObjectProperty prop; + Integer oldCnt; + ResultSet rs = executeSelectQuery(query, model); + QuerySolution qs; + while(rs.hasNext()){ + qs = rs.next(); + prop = new ObjectProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result, allObjectProperties); + } + + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + + } + + private void runSPARQL1_1_Mode() { //get properties and how often they occur - int limit = 1000; - int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." 
+ - "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + - "}"; - String query; - Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); - ObjectProperty prop; - Integer oldCnt; - boolean repeat = true; - - while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, propertyToDescribe, limit, offset); - ResultSet rs = executeSelectQuery(query); - QuerySolution qs; - repeat = false; - while(rs.hasNext()){ - qs = rs.next(); - prop = new ObjectProperty(qs.getResource("p").getURI()); - int newCnt = qs.getLiteral("count").getInt(); - oldCnt = result.get(prop); - if(oldCnt == null){ - oldCnt = Integer.valueOf(newCnt); - } - result.put(prop, oldCnt); - qs.getLiteral("count").getInt(); - repeat = true; - } - if(!result.isEmpty()){ - currentlyBestAxioms = buildAxioms(result, objectProperties); - offset += 1000; - } + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + + "}"; + String query; + Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); + ObjectProperty prop; + Integer oldCnt; + boolean repeat = true; + + while(!terminationCriteriaSatisfied() && repeat){ + query = String.format(queryTemplate, propertyToDescribe, limit, offset); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + repeat = false; + while(rs.hasNext()){ + qs = rs.next(); + prop = new ObjectProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + repeat = true; + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result, allObjectProperties); + offset += 1000; + } + } - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } private List<EvaluatedAxiom> 
buildAxioms(Map<ObjectProperty, Integer> property2Count, Set<ObjectProperty> allProperties){ Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -41,6 +41,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; @ComponentAnn(name="equivalent dataproperty axiom learner", shortName="dplequiv", version=0.1) public class EquivalentDataPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -72,43 +74,91 @@ SortedSet<DatatypeProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - //get subjects with types + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + - "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + - "}"; + String baseQuery = "CONSTRUCT {?s ?p ?o.} WHERE {?s <%s> ?o. 
?s ?p ?o.} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ + model.add(newModel); + query = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.} GROUP BY ?p"; + + DatatypeProperty prop; + Integer oldCnt; + ResultSet rs = executeSelectQuery(query, model); + QuerySolution qs; + while(rs.hasNext()){ + qs = rs.next(); + prop = new DatatypeProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result); + } + + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + + } + + private void runSPARQL1_1_Mode() { + // get subjects with types + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." 
+ + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); DatatypeProperty prop; Integer oldCnt; boolean repeat = true; - - while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, propertyToDescribe, limit, offset); + + while (!terminationCriteriaSatisfied() && repeat) { + query = String.format(queryTemplate, propertyToDescribe, limit, + offset); ResultSet rs = executeSelectQuery(query); QuerySolution qs; repeat = false; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); prop = new DatatypeProperty(qs.getResource("p").getURI()); int newCnt = qs.getLiteral("count").getInt(); oldCnt = result.get(prop); - if(oldCnt == null){ + if (oldCnt == null) { oldCnt = Integer.valueOf(newCnt); } result.put(prop, oldCnt); qs.getLiteral("count").getInt(); repeat = true; } - if(!result.isEmpty()){ + if (!result.isEmpty()) { currentlyBestAxioms = buildAxioms(result); offset += 1000; } - + } - - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -40,12 +40,13 @@ import org.dllearner.core.owl.ObjectProperty; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SparqlEndpoint; -import org.dllearner.learningproblems.AxiomScore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import 
com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; @ComponentAnn(name="equivalent objectproperty axiom learner", shortName="oplequiv", version=0.1) public class EquivalentObjectPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -77,23 +78,31 @@ SortedSet<ObjectProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - //get subjects with types + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + - "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + - "}"; - String query; + String baseQuery = "CONSTRUCT {?s ?p ?o.} WHERE {?s <%s> ?o. 
?s ?p ?o.} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); - ObjectProperty prop; - Integer oldCnt; - boolean repeat = true; - - while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, propertyToDescribe, limit, offset); - ResultSet rs = executeSelectQuery(query); + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ + model.add(newModel); + query = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.} GROUP BY ?p"; + + ObjectProperty prop; + Integer oldCnt; + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; - repeat = false; while(rs.hasNext()){ qs = rs.next(); prop = new ObjectProperty(qs.getResource("p").getURI()); @@ -104,18 +113,58 @@ } result.put(prop, oldCnt); qs.getLiteral("count").getInt(); - repeat = true; } if(!result.isEmpty()){ currentlyBestAxioms = buildAxioms(result); - offset += 1000; } + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); } - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } + private void runSPARQL1_1_Mode() { + //get subjects with types + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." 
+ + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + + "}"; + String query; + Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); + ObjectProperty prop; + Integer oldCnt; + boolean repeat = true; + + while(!terminationCriteriaSatisfied() && repeat){ + query = String.format(queryTemplate, propertyToDescribe, limit, offset); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + repeat = false; + while(rs.hasNext()){ + qs = rs.next(); + prop = new ObjectProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + repeat = true; + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result); + offset += 1000; + } + + } + + } + private List<EvaluatedAxiom> buildAxioms(Map<ObjectProperty, Integer> property2Count){ List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); Integer all = property2Count.get(propertyToDescribe); @@ -129,7 +178,7 @@ properties.add(entry.getKey()); int popularity = reasoner.getPropertyCount(entry.getKey()); int total = popularity;//Math.max(popularity, all); - int success = entry.getValue();System.out.println(entry.getKey());System.out.println(total);System.out.println(success); + int success = entry.getValue();//System.out.println(entry.getKey());System.out.println(total);System.out.println(success); Score score = computeScore(total, success); evalAxiom = new EvaluatedAxiom(new EquivalentObjectPropertiesAxiom(properties),score); axioms.add(evalAxiom); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ 
trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -40,6 +40,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; @ComponentAnn(name="data subPropertyOf axiom learner", shortName="dplsubprop", version=0.1) public class SubDataPropertyOfAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -71,23 +73,31 @@ SortedSet<DatatypeProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - //get properties and how often they occur + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?p a <http://www.w3.org/2002/07/owl#DatatypeProperty>. ?s ?p ?o. " + - "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + - "}"; - String query; + String baseQuery = "CONSTRUCT {?s ?p ?o.} WHERE {?s <%s> ?o. 
?s ?p ?o.} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); - DatatypeProperty prop; - Integer oldCnt; - boolean repeat = true; - - while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, propertyToDescribe, limit, offset); - ResultSet rs = executeSelectQuery(query); + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ + model.add(newModel); + query = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.} GROUP BY ?p"; + + DatatypeProperty prop; + Integer oldCnt; + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; - repeat = false; while(rs.hasNext()){ qs = rs.next(); prop = new DatatypeProperty(qs.getResource("p").getURI()); @@ -98,18 +108,58 @@ } result.put(prop, oldCnt); qs.getLiteral("count").getInt(); - repeat = true; } - if(!result.isEmpty()){ currentlyBestAxioms = buildAxioms(result); - offset += 1000; } + + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); } - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } + private void runSPARQL1_1_Mode() { + //get subjects with types + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." 
+ + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + + "}"; + String query; + Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); + DatatypeProperty prop; + Integer oldCnt; + boolean repeat = true; + + while(!terminationCriteriaSatisfied() && repeat){ + query = String.format(queryTemplate, propertyToDescribe, limit, offset); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + repeat = false; + while(rs.hasNext()){ + qs = rs.next(); + prop = new DatatypeProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + repeat = true; + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result); + offset += 1000; + } + + } + + } + private List<EvaluatedAxiom> buildAxioms(Map<DatatypeProperty, Integer> property2Count){ List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); Integer total = property2Count.get(propertyToDescribe); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -40,6 +40,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; @ComponentAnn(name="object subPropertyOf axiom learner", shortName="oplsubprop", version=0.1) public class SubObjectPropertyOfAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -71,23 +73,31 @@ SortedSet<ObjectProperty> 
existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - //get subjects with types + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + - "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + - "}"; - String query; + String baseQuery = "CONSTRUCT {?s ?p ?o.} WHERE {?s <%s> ?o. ?s ?p ?o.} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); - ObjectProperty prop; - Integer oldCnt; - boolean repeat = true; - - while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, propertyToDescribe, limit, offset); - ResultSet rs = executeSelectQuery(query); + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ + model.add(newModel); + query = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.} GROUP BY ?p"; + + ObjectProperty prop; + Integer oldCnt; + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; - repeat = false; while(rs.hasNext()){ qs = rs.next(); prop = new ObjectProperty(qs.getResource("p").getURI()); @@ -98,17 +108,58 @@ } result.put(prop, oldCnt); qs.getLiteral("count").getInt(); - repeat = true; } if(!result.isEmpty()){ currentlyBestAxioms = buildAxioms(result); - offset += 1000; } + + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); } - logger.info("...finished in {}ms.", 
(System.currentTimeMillis()-startTime)); } + private void runSPARQL1_1_Mode() { + //get subjects with types + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + + "}"; + String query; + Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); + ObjectProperty prop; + Integer oldCnt; + boolean repeat = true; + + while(!terminationCriteriaSatisfied() && repeat){ + query = String.format(queryTemplate, propertyToDescribe, limit, offset); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + repeat = false; + while(rs.hasNext()){ + qs = rs.next(); + prop = new ObjectProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + repeat = true; + } + if(!result.isEmpty()){ + currentlyBestAxioms = buildAxioms(result); + offset += 1000; + } + + } + + } + private List<EvaluatedAxiom> buildAxioms(Map<ObjectProperty, Integer> property2Count){ List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); Integer total = property2Count.get(propertyToDescribe); Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -191,7 +191,7 @@ queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); queryExecution.setDefaultGraphURIs(ks.getEndpoint().getDefaultGraphURIs()); queryExecution.setNamedGraphURIs(ks.getEndpoint().getNamedGraphURIs()); - System.out.println(query); + return 
queryExecution.execConstruct(); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-12-12 15:40:07 UTC (rev 3500) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-12-13 11:57:29 UTC (rev 3501) @@ -595,7 +595,7 @@ public SortedSet<ObjectProperty> getInverseObjectProperties(ObjectProperty property){ SortedSet<ObjectProperty> inverseObjectProperties = new TreeSet<ObjectProperty>(); String query = "SELECT ?p WHERE {" + - "{<%p> <%ax> ?p.} UNION {?p <%ax> <%p>}}".replace("%p", property.getName()).replace("%ax", OWL.inverseOf.getURI()); + "{<%p> <%ax> ?p.} UNION {?p <%ax> <%p>}}".replace("%p", property.getName()).replace("%ax", OWL.inverseOf.getURI());System.out.println(query); ResultSet rs = executeSelectQuery(query); QuerySolution qs; while(rs.hasNext()){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-12-12 15:40:16
|
Revision: 3500 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3500&view=rev Author: jenslehmann Date: 2011-12-12 15:40:07 +0000 (Mon, 12 Dec 2011) Log Message: ----------- added blacklist for global enrichment Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java Modified: trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-12 14:37:51 UTC (rev 3499) +++ trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-12 15:40:07 UTC (rev 3500) @@ -31,6 +31,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.TreeSet; import javax.xml.ws.http.HTTPException; @@ -125,6 +126,9 @@ } System.out.println(endpoints.size() + " endpoints detected."); + TreeSet<String> blacklist = new TreeSet<String>(); + blacklist.add("rkb-explorer-crime"); // computation never completes + // perform enrichment on endpoints for(Entry<String,SparqlEndpoint> endpoint : endpoints.entrySet()) { // run enrichment @@ -139,6 +143,10 @@ e.maxEntitiesPerType = 3; // hack for faster testing of endpoints + if(blacklist.contains(name)) { + continue; + } + boolean success = false; // run enrichment script - we make a case distinguish to see which kind of problems we get // (could be interesting for statistics later on) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-12 14:38:00
|
Revision: 3499 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3499&view=rev Author: lorenz_b Date: 2011-12-12 14:37:51 +0000 (Mon, 12 Dec 2011) Log Message: ----------- Removed 2 unused classes. Removed Paths: ------------- trunk/components-core/src/main/java/org/dllearner/utilities/ICFinder.java trunk/components-core/src/main/java/org/dllearner/utilities/WordnetSimilarity.java Deleted: trunk/components-core/src/main/java/org/dllearner/utilities/ICFinder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/ICFinder.java 2011-12-12 14:36:59 UTC (rev 3498) +++ trunk/components-core/src/main/java/org/dllearner/utilities/ICFinder.java 2011-12-12 14:37:51 UTC (rev 3499) @@ -1,189 +0,0 @@ -package org.dllearner.utilities; - -import java.io.BufferedReader; -import java.io.FileReader; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Hashtable; -import java.util.Vector; - -/** - * - * @author Uthaya - * - */ -// n + v only -//David Hope, 2008, University Of Sussex - -public class ICFinder -{ - private String[] editor = null; - private String icfilename = ""; - private BufferedReader in = null; - private String line = ""; -// look up - private Hashtable<String, Double> lookup = null; // quick look up for synset counts (we require Double as Resnik counts are doubles) -// counts for nouns and verbs - private double nouns_sum = 0.0; - private double verbs_sum = 0.0; - private double nounsandverbs_sum = 0.0; // ** the ??? normaliser ??? ** for the 'getProbability' method -// <ROOTS> for nouns and verbs - private double nounroot_sum = 0.0; - private double verbroot_sum = 0.0; - private ArrayList<String> nounroots = null; - private ArrayList<String> verbroots = null; - - public ICFinder(String icfilename) - { - System.out.println("... calculating IC <roots> ..."); - System.out.println("... 
ICFinder"); - -// your IC file - this.icfilename = icfilename; -// quick look up table - lookup = new Hashtable<String, Double>(); -// get some useful 'constants' - nounroots = new ArrayList<String>(); - verbroots = new ArrayList<String>(); - Vector<Double> constants = setup(); - nouns_sum = constants.get(0); - verbs_sum = constants.get(1); - nounsandverbs_sum = ( nouns_sum + verbs_sum ); - nounroot_sum = constants.get(2); - verbroot_sum = constants.get(3); - } - - public double getRootSum(String pos) - { - if(pos.equalsIgnoreCase("v")) - return (verbroot_sum); - return (nounroot_sum); - } - - -// 'getFrequency': get the count for the {synset} from the IC file - private double getFrequency(String synset, String pos) - { - if(lookup.containsKey(synset + pos)) - return ( lookup.get(synset + pos) ); - return ( 0.0 ); - } - -// 'getProbability': get the probability of the {synset} - private double getProbability(String synset, String pos) - { - double freq = getFrequency(synset, pos); - if(freq == 0.0) - return ( 0.0 ); - - double probability = 0.0; - - if(pos.equalsIgnoreCase("n")) - probability = ( freq / nounroot_sum ); // Ted Pedersen et al. use the sum of the noun<root> counts *not* the sum of the noun counts - - if(pos.equalsIgnoreCase("v")) - probability = ( freq / verbroot_sum ); // Ted Pedersen et al. use the sum of the verb<root> counts *not* the sum of the verb counts - - return ( probability ); - } - - -// does all / any type of synset i.e. standard synset | <lcs> synset -// !!! we are using the notion of a 'fake'<root> as per the Perl implementation !!! -// !!! there is no option to turn the 'fake'<root> off in this implementation - it all gets a bit silly (hard to justify) if we do this !!! - public double getIC(String synset, String pos) - { - double ic = 0.0; -// Case 1. There is *no* <lcs> ............................................................................................................................................... 
-// If the 'synset' is empty (null Object or an empty String), - this implies that no <lcs>|synset was found for a (pair of synsets) and thus, -// they must join at an 'imaginary' <root> point in the WordNet space (tree). We call this the'fake'<root>. -// Further, *if* we are assuming a 'fake' root' (which we do; we default to it as per the Perl implementation), - this implies -// that it subsumes all other <roots>. This being the case, the 'fake'<root> must then have an Information Content(ic) value of 0 -// as it provides us with zero information - if(synset == null || synset.length() == 0) - { - return ( ic ); - } -// ....................................................................................................................................................................................... -// Case 2. There is an <lcs> but it has a frequency of zero and thus it has a probability of zero and thus is just not valid as input -// to the Information Content equation ( we will get 'Infinity') - so, we simply return 0 - double p = getProbability(synset, pos); - if(p == 0.0) - { - return ( ic ); - } - else - { - ic = -Math.log(p); - } -// ....................................................................................................................................................................................... -// Case 3. There is an <lcs>, -- it may be a <root> or it may be a boring old synset but - it does have a frequency, thus it does have -// a probability and thus we may calculate the Information Content for this synset. 
If the synset is a <root> and there is only 1 such -// <root> for the POS, then, effectively the Information Contente will be zero, otherwise we should get a value that is greater than zero - return ( ic ); - } - -// utility: get counts for {synsets} | just nouns | just verbs | noun'fake'<root> | verb'fake'<root> -// these are used to calculate probabilities of {synsets} and to 'back-off' to a <root> value if no LCS exists for 2 words - private Vector<Double> setup() - { - String unit = ""; - double uc = 0.0; - double nc = 0.0; - double vc = 0.0; - double nrc = 0.0; - double vrc = 0.0; - Vector<Double> counts = new Vector<Double>(); - try - { - in = new BufferedReader(new FileReader(icfilename)); - while ((line = in.readLine()) != null) - { - editor = line.split("\\s"); // IC files are space delimited - for(int i = 0; i < editor.length; i++) - { - unit = editor[i]; -// nouns - if(unit.endsWith("n")) - { - lookup.put(editor[0], Double.parseDouble(editor[1])); - uc = Double.parseDouble(editor[1]); // get the value: the 'count' for the {synset} - nc += uc;// add to noun total - if(editor.length == 3) // if ROOT - { - nrc += uc;// add to noun<root> total - // store noun <root> - nounroots.add(editor[0].substring(0,editor[0].length()-1)); - } - }else if(unit.endsWith("v")) // verbs - { - lookup.put(editor[0], Double.parseDouble(editor[1])); - uc = Double.parseDouble(editor[1]); // get the value: the 'count' for the {synset} - vc += uc; // add to verb total - if(editor.length == 3) // if ROOT - { - vrc += uc; // add to verb<root> total - // store verb<root> - verbroots.add(editor[0].substring(0,editor[0].length()-1)); - } - }/*else{ - System.err.println("Adj? 
"+ unit); - }*/ - } - } - in.close(); - } - catch (IOException e){e.printStackTrace();} - counts.add(nc); counts.add(vc); counts.add(nrc); counts.add(vrc); - return ( counts ); - } - - public ArrayList<String> getNounRoots() - { - return ( nounroots ); - } - public ArrayList<String> getVerbRoots() - { - return ( verbroots ); - } -} Deleted: trunk/components-core/src/main/java/org/dllearner/utilities/WordnetSimilarity.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/WordnetSimilarity.java 2011-12-12 14:36:59 UTC (rev 3498) +++ trunk/components-core/src/main/java/org/dllearner/utilities/WordnetSimilarity.java 2011-12-12 14:37:51 UTC (rev 3499) @@ -1,287 +0,0 @@ -package org.dllearner.utilities; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import net.didion.jwnl.JWNL; -import net.didion.jwnl.JWNLException; -import net.didion.jwnl.data.IndexWord; -import net.didion.jwnl.data.POS; -import net.didion.jwnl.data.PointerTarget; -import net.didion.jwnl.data.PointerType; -import net.didion.jwnl.data.Synset; -import net.didion.jwnl.data.Word; -import net.didion.jwnl.dictionary.Dictionary; - -public class WordnetSimilarity { - - public Dictionary dict; - - public WordnetSimilarity(){ - try { - JWNL.initialize(this.getClass().getClassLoader().getResourceAsStream("wordnet_properties.xml")); - dict = Dictionary.getInstance(); - } catch (JWNLException e) { - e.printStackTrace(); - } - } - - public double computeSimilarity(String s1, String s2, POS pos){ - List<String> synonyms = new ArrayList<String>(); - - try { - IndexWord iw1 = dict.getIndexWord(pos, s1); - IndexWord iw2 = dict.getIndexWord(pos, s2);//dict.getMorphologicalProcessor().lookupBaseForm(pos, s) -// IndexWord iw = dict.getMorphologicalProcessor().lookupBaseForm(pos, s); - getUpwardHierachy(s1, pos); - getUpwardHierachy(s2, pos); - - ICFinder 
icFinder = new ICFinder("src/main/resources/ic-semcor.dat"); - Synset synset1 = iw1.getSenses()[0]; - Synset synset2 = iw2.getSenses()[0]; - Synset lcs = getLCS(synset1, synset2, "NN", icFinder); - System.out.println(lcs); - - for(Synset synset : iw1.getSenses()){ - for(List<PointerTarget> tree : getHypernymTrees(synset, new HashSet<PointerTarget>())){ - for(PointerTarget t : tree){ - System.out.print(((Synset)t).getWords()[0].getLemma() + "-->"); - } - System.out.println(); - } - } - - - - } catch (JWNLException e) { - e.printStackTrace(); - } - - - return -1; - } - - private List<PointerTarget> getUpwardHierachy(PointerTarget target){ - List<PointerTarget> hierarchy = new ArrayList<PointerTarget>(); - try { - PointerTarget[] targets = target.getTargets(PointerType.HYPERNYM); - for (PointerTarget t : targets) { - hierarchy.add(t); - hierarchy.addAll(getUpwardHierachy(t)); - } - } catch (JWNLException e) { - e.printStackTrace(); - } - return hierarchy; - - } - -// private List<List<PointerTarget>> getUpwardHierachies(List<List<PointerTarget>> targets){ -// List<List<PointerTarget>> hierarchies = new ArrayList<List<PointerTarget>>(); -// try { -// PointerTarget[] targets = target.getTargets(PointerType.HYPERNYM); -// for (PointerTarget t : targets) { -// hierarchy.add(t); -// hierarchy.addAll(getUpwardHierachy(t)); -// } -// } catch (JWNLException e) { -// e.printStackTrace(); -// } -// return hierarchy; -// -// } - - private void getUpwardHierachy(String word, POS pos){ - try { - IndexWord iw = dict.getIndexWord(pos, word); - for(Synset synset : iw.getSenses()){ - for(PointerTarget t : getUpwardHierachy(synset)){ - System.out.print(((Synset)t).getWord(0).getLemma() + "-->"); - } - System.out.println(); - } - } catch (JWNLException e) { - e.printStackTrace(); - } - - } - - private void getHypernyms(IndexWord iw){ - try { - if(iw != null){ - Synset[] synsets = iw.getSenses(); - for(Synset s : synsets){ - System.out.println(s); - PointerTarget[] targets = 
s.getTargets(PointerType.HYPERNYM); - for (PointerTarget target : targets) { - Word[] words = ((Synset) target).getWords(); - for (Word word : words) { - System.out.println(word); - } - } - } - } - } catch (JWNLException e) { - e.printStackTrace(); - } - } - - public Synset getLCS(Synset synset1, Synset synset2, String pos, ICFinder icFinder) throws JWNLException - { - // synset1 - HashSet<Synset> s1 = new HashSet<Synset>(); s1.add(synset1); - HashSet<Synset> h1 = new HashSet<Synset>(); - h1 = getHypernyms(s1,h1); - // !!! important !!! we must add the original {synset} back in, as the 2 {synsets}(senses) we are comparing may be equivalent i.e. bthe same {synset}! - h1.add(synset1); - //System.out.println(">>>>>>>>>>>>>>>>>>>>>"); - // synset2 - HashSet<Synset> s2 = new HashSet<Synset>(); s2.add(synset2); - HashSet<Synset> h2 = new HashSet<Synset>(); - h2 = getHypernyms(s2,h2); - h2.add(synset2); // ??? don't really need this ??? - //System.out.println("JWNL,h1, "+toStr(synset1.getWords())+", :h2, "+toStr(synset2.getWords())+" ,=, "+h1.size()+", "+h2.size()); - // get the candidate <lcs>s i.e. the intersection of all <hypernyms> | {synsets} which subsume the 2 {synsets} - /*System.out.println("========================"); - System.out.println(h1); - System.out.println(h2); - System.out.println("========================");*/ - h1.retainAll(h2); - if(h1.isEmpty()) - { - return (null); // i.e. 
there is *no* <LCS> for the 2 synsets - } - - // get *a* <lcs> with the highest Information Content - double max = -Double.MAX_VALUE; - Synset maxlcs = null; - for (Synset h : h1) - { - double ic = icFinder.getIC("" + h.getOffset(), pos); // use ICfinder to get the Information Content value - if(ic > max) - { - max = ic; - maxlcs = h; - } - } - return maxlcs; // return the <synset} with *a* highest IC value - } - - - // 1.1 GET <HYPERNYMS> - private HashSet<Synset> getHypernyms(HashSet<Synset> synsets, HashSet<Synset> allhypernms) throws JWNLException - { - if(allhypernms.size()>= 100){ - return allhypernms; - } - - //System.out.println("IP: " + synsets); - HashSet<Synset> hypernyms = new HashSet<Synset>(); - for(Synset s : synsets) - { - - PointerTarget[] hyp = s.getTargets(PointerType.HYPERNYM); // get the <hypernyms> if there are any - for (PointerTarget pointerTarget : hyp) { - if (pointerTarget instanceof Synset) { - Synset poiSyn = (Synset) pointerTarget; - hypernyms.add(poiSyn); - }/*else{ - //System.out.println("PointerTarget is not instanceof Synset: "+pointerTarget); - }*/ - } - //System.out.println("\t"+hypernyms); - } - if(!hypernyms.isEmpty()) - { - if(allhypernms.size()+hypernyms.size()>= 100){ - return allhypernms; - } - try { - allhypernms.addAll(hypernyms); - } catch (StackOverflowError e) { - //System.out.println(allhypernms.size()); - //System.out.println(hypernyms.size()); - //e.printStackTrace(); - System.gc(); - System.gc(); - System.err.println(e.getMessage()); - return allhypernms; - } - allhypernms = getHypernyms(hypernyms, allhypernms); - } - //System.out.println(allhypernms); - return allhypernms; - } - - /** - * since this method is heavily used, inner cache would help for e.g. - * calculating similarity matrix - * - * Suroutine that returns an array of hypernym trees, given the offset of # - * the synset. Each hypernym tree is an array of offsets. 
- * - * @param synset - * @param mode - */ - public List<List<PointerTarget>> getHypernymTrees(PointerTarget synset, Set<PointerTarget> history) { - PointerTarget key = synset; - - // check if the input synset is one of the imaginary root nodes - if (synset.equals(new Synset(POS.NOUN, 0, new Word[]{new Word("ROOT", "ROOT", 0)}, null, null, null))) { - List<PointerTarget> tree = new ArrayList<PointerTarget>(); - tree.add(new Synset(POS.NOUN, 0, new Word[]{new Word("ROOT", "ROOT", 0)}, null, null, null)); - List<List<PointerTarget>> trees = new ArrayList<List<PointerTarget>>(); - trees.add(tree); - return trees; - } - - List<PointerTarget> synlinks = null; - try { - synlinks = Arrays.asList(synset.getTargets(PointerType.HYPERNYM)); - } catch (JWNLException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - List<List<PointerTarget>> returnList = new ArrayList<List<PointerTarget>>(); - if (synlinks.size() == 0) { - List<PointerTarget> tree = new ArrayList<PointerTarget>(); - tree.add(synset); - tree.add(0, new Synset(POS.NOUN, 0, new Word[]{new Word("ROOT", "ROOT", 0)}, null, null, null)); - returnList.add(tree); - } else { - for (PointerTarget hypernym : synlinks) { - if ( history.contains(hypernym) ) continue; - history.add(hypernym); - - List<List<PointerTarget>> hypernymTrees = getHypernymTrees(hypernym, history); - if ( hypernymTrees!=null ) { - for (List<PointerTarget> hypernymTree : hypernymTrees) { - hypernymTree.add(synset); - returnList.add(hypernymTree); - } - } - if (returnList.size() == 0) { - List<PointerTarget> newList = new ArrayList<PointerTarget>(); - newList.add(synset); - newList.add(0, new Synset(POS.NOUN, 0, new Word[]{new Word("ROOT", "ROOT", 0)}, null, null, null)); - returnList.add(newList); - } - } - } - - return returnList; - } - - - public static void main(String[] args) { - System.out.println(new WordnetSimilarity().computeSimilarity("writer", "teacher", POS.NOUN)); - -// ILexicalDatabase db = new NictWordNet(); -// 
System.out.println(new Lin(db).calcRelatednessOfWords("writer", "teacher")); - } - -} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-12 14:37:07
|
Revision: 3498 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3498&view=rev Author: lorenz_b Date: 2011-12-12 14:36:59 +0000 (Mon, 12 Dec 2011) Log Message: ----------- Updated chunk of algorithms to work with SPARQL endpoints, which do not support COUNT queries. Fixed problem in hierarchy, which occurs when triples of type [<cls> <rdfs:subClassOf> <cls>] occur in the knowledge base. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 
trunk/components-core/src/main/java/org/dllearner/utilities/WordnetSimilarity.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/utilities/ICFinder.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -77,6 +77,8 @@ private boolean suggestMostGeneralClasses = true; private boolean useClassPopularity = true; + private Set<NamedClass> allClasses; + public DisjointClassesLearner(SparqlEndpointKS ks){ this.ks = ks; } @@ -115,8 +117,8 @@ //TODO //at first get all existing classes in knowledgebase - Set<NamedClass> classes = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); - classes.remove(classToDescribe); + allClasses = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); + allClasses.remove(classToDescribe); //get the subclasses if(reasoner.isPrepared()){ @@ -125,46 +127,97 @@ subClasses = reasoner.getSubClasses(classToDescribe, true); } + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + //get classes and how often they occur - int limit = 1000; - int offset = 0; - String queryTemplate = "SELECT ?type COUNT(?s) AS ?count WHERE {?s a ?type." 
+ - "{SELECT ?s WHERE {?s a <%s>.} LIMIT %d OFFSET %d}" + - "}"; - String query; - Map<NamedClass, Integer> result = new HashMap<NamedClass, Integer>(); - NamedClass cls; - Integer oldCnt; - boolean repeat = true; - while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, classToDescribe, limit, offset); - ResultSet rs = executeSelectQuery(query); - QuerySolution qs; - repeat = false; - while(rs.hasNext()){ - qs = rs.next(); - cls = new NamedClass(qs.getResource("type").getURI()); - int newCnt = qs.getLiteral("count").getInt(); - oldCnt = result.get(cls); - if(oldCnt == null){ - oldCnt = Integer.valueOf(newCnt); - } else { - oldCnt += newCnt; - } - - result.put(cls, oldCnt); - qs.getLiteral("count").getInt(); - repeat = true; - } - if(!result.isEmpty()){ - currentlyBestEvaluatedDescriptions = buildEvaluatedClassDescriptions(result, classes); - offset += 1000; - } - } logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } + + private void runSPARQL1_0_Mode(){ + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?s ?type WHERE {?s a <%s>. 
?s a ?type.} LIMIT %d OFFSET %d"; + String query; + Map<NamedClass, Integer> result = new HashMap<NamedClass, Integer>(); + NamedClass cls; + Integer oldCnt; + boolean repeat = true; + + int total = 0; + + String resource = ""; + while(!terminationCriteriaSatisfied() && repeat){ + query = String.format(queryTemplate, classToDescribe, limit, offset); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + repeat = false; + while(rs.hasNext()){ + qs = rs.next(); + String newResource = qs.getResource("?s").getURI(); + if(newResource != resource){ + total++; + resource = newResource; + } + cls = new NamedClass(qs.getResource("type").getURI()); + oldCnt = result.get(cls); + if(oldCnt == null){ + oldCnt = Integer.valueOf(0); + } + int newCnt = oldCnt + 1; + + result.put(cls, newCnt); + repeat = true; + } + if(!result.isEmpty()){ + currentlyBestEvaluatedDescriptions = buildEvaluatedClassDescriptions(result, total); + offset += 1000; + } + } + } + + private void runSPARQL1_1_Mode(){ + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?type COUNT(?s) AS ?count WHERE {?s a ?type." 
+ + "{SELECT ?s WHERE {?s a <%s>.} LIMIT %d OFFSET %d}" + + "}"; + String query; + Map<NamedClass, Integer> result = new HashMap<NamedClass, Integer>(); + NamedClass cls; + Integer oldCnt; + boolean repeat = true; + + while(!terminationCriteriaSatisfied() && repeat){ + query = String.format(queryTemplate, classToDescribe, limit, offset); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + repeat = false; + while(rs.hasNext()){ + qs = rs.next(); + cls = new NamedClass(qs.getResource("type").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(cls); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } else { + oldCnt += newCnt; + } + + result.put(cls, oldCnt); + repeat = true; + } + if(!result.isEmpty()){ + currentlyBestEvaluatedDescriptions = buildEvaluatedClassDescriptions(result, allClasses); + offset += 1000; + } + } + } @Override public List<Description> getCurrentlyBestDescriptions(int nrOfDescriptions) { @@ -269,6 +322,63 @@ return evalDescs; } + private List<EvaluatedDescription> buildEvaluatedClassDescriptions(Map<NamedClass, Integer> class2Count, int total){ + List<EvaluatedDescription> evalDescs = new ArrayList<EvaluatedDescription>(); + + //Remove temporarily classToDescribe but keep track of their count + class2Count.remove(classToDescribe); + + //get complete disjoint classes + Set<NamedClass> completeDisjointclasses = new TreeSet<NamedClass>(allClasses); + completeDisjointclasses.removeAll(class2Count.keySet()); + + //drop all classes which have a super class in this set + if(suggestMostGeneralClasses && reasoner.isPrepared()){ + keepMostGeneralClasses(completeDisjointclasses); + } + + //we remove the asserted subclasses here + completeDisjointclasses.removeAll(subClasses); + for(Description subClass : subClasses){ + class2Count.remove(subClass); + } + + + EvaluatedDescription evalDesc; + //firstly, create disjoint classexpressions which not occur and give score of 1 + if(reasoner.isPrepared()){ + 
SortedSet<Description> mostGeneralClasses = reasoner.getClassHierarchy().getMostGeneralClasses(); + } + for(NamedClass cls : completeDisjointclasses){ + if(useClassPopularity){ + int popularity = reasoner.getIndividualsCount(cls); + //we skip classes with no instances + if(popularity == 0) continue; + double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(popularity, 0); + double accuracy = (confidenceInterval[0] + confidenceInterval[1]) / 2; + evalDesc = new EvaluatedDescription(cls, new AxiomScore(1- accuracy)); + } else { + evalDesc = new EvaluatedDescription(cls, new AxiomScore(1)); + } + + evalDescs.add(evalDesc); + } + + //secondly, create disjoint classexpressions with score 1 - (#occurence/#all) + for(Entry<NamedClass, Integer> entry : sortByValues(class2Count)){ +// evalDesc = new EvaluatedDescription(entry.getKey(), +// new AxiomScore(1 - (entry.getValue() / (double)all))); + double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(total, entry.getValue()); + double accuracy = (confidenceInterval[0] + confidenceInterval[1]) / 2; + evalDesc = new EvaluatedDescription(entry.getKey(), + new AxiomScore(1 - accuracy)); + evalDescs.add(evalDesc); + } + + class2Count.put(classToDescribe, total); + return evalDescs; + } + private void keepMostGeneralClasses(Set<NamedClass> classes){ ClassHierarchy h = reasoner.getClassHierarchy(); for(NamedClass nc : new HashSet<NamedClass>(classes)){ Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -129,7 +129,6 @@ logger.info("Existing super classes: " + existingSuperClasses); } - Map<Individual, SortedSet<Description>> ind2Types = new 
HashMap<Individual, SortedSet<Description>>(); int limit = 1000; boolean repeat = true; @@ -142,7 +141,7 @@ logger.info("...finished in {}ms. (Got {} rows)", (System.currentTimeMillis()-startTime), fetchedRows); } - + public NamedClass getClassToDescribe() { return classToDescribe; } @@ -152,10 +151,13 @@ } private boolean addIndividualsWithTypes(Map<Individual, SortedSet<Description>> ind2Types, int limit, int offset){ -// String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a <%s>. ?ind a ?type} LIMIT %d OFFSET %d", classToDescribe.getName(), limit, offset); boolean notEmpty = false; - String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. {SELECT ?ind {?ind a <%s>} LIMIT %d OFFSET %d}}", classToDescribe.getName(), limit, offset); -// String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a <%s>. ?ind a ?type} LIMIT %d OFFSET %d", classToDescribe.getName(), limit, offset); + String query; + if(ks.supportsSPARQL_1_1()){ + query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. {SELECT ?ind {?ind a <%s>} LIMIT %d OFFSET %d}}", classToDescribe.getName(), limit, offset); + } else { + query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a <%s>. 
?ind a ?type} LIMIT %d OFFSET %d", classToDescribe.getName(), limit, offset); + } ResultSet rs = executeSelectQuery(query); Individual ind; Description newType; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -21,18 +21,14 @@ import java.net.URL; import java.util.ArrayList; -import java.util.Collections; -import org.aksw.commons.collections.multimaps.BiHashMultimap; import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ObjectPropertyEditor; import org.dllearner.core.owl.AsymmetricObjectPropertyAxiom; -import org.dllearner.core.owl.Individual; import org.dllearner.core.owl.ObjectProperty; -import org.dllearner.core.owl.SymmetricObjectPropertyAxiom; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SparqlEndpoint; import org.slf4j.Logger; @@ -40,6 +36,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL2; @ComponentAnn(name="asymmetric objectproperty axiom learner", shortName="oplasymm", version=0.1) @@ -49,6 +47,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + + private boolean declaredAsymmetric; public AsymmetricObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = 
ks; @@ -71,7 +71,7 @@ //check if property is already declared as asymmetric in knowledge base String query = String.format("ASK {<%s> a <%s>}", propertyToDescribe, OWL2.AsymmetricProperty.getURI()); - boolean declaredAsymmetric = executeAskQuery(query); + declaredAsymmetric = executeAskQuery(query); if(declaredAsymmetric) { existingAxioms.add(new AsymmetricObjectPropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as symmetric in knowledge base."); @@ -87,36 +87,42 @@ } private void runSPARQL1_0_Mode(){ - BiHashMultimap<Individual, Individual> individualsMap = new BiHashMultimap<Individual, Individual>(); - boolean repeat = true; + Model model = ModelFactory.createDefaultModel(); int limit = 1000; - while(!terminationCriteriaSatisfied() && repeat){ - String query = String.format("SELECT DISTINCT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d", propertyToDescribe.getURI().toString(), limit, fetchedRows); + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get number of instances of s with <s p o> + query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; - Individual s; - Individual o; - int cnt = 0; + int total = 0; while(rs.hasNext()){ qs = rs.next(); - s = new Individual(qs.getResource("s").getURI()); - o = new Individual(qs.getResource("o").getURI()); - individualsMap.put(s, o); - cnt++; + total = qs.getLiteral("total").getInt(); } - int total = individualsMap.size(); - int asymmetric = 0; + query = "SELECT (COUNT(?s) AS ?symmetric) WHERE {?s <%s> ?o. 
?o <%s> ?s.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query); + int symmetric = 0; + while(rs.hasNext()){ + qs = rs.next(); + symmetric = qs.getLiteral("symmetric").getInt(); + } + int asymmetric = total - symmetric; - for(java.util.Map.Entry<Individual, Individual> e : individualsMap.entries()){ - if(!individualsMap.getInverse().containsEntry(e.getKey(), e.getValue())){ - asymmetric++; - } + if(total > 0){ + currentlyBestAxioms.clear(); + currentlyBestAxioms.add(new EvaluatedAxiom(new AsymmetricObjectPropertyAxiom(propertyToDescribe), + computeScore(total, asymmetric), declaredAsymmetric)); } - - currentlyBestAxioms = Collections.singletonList(new EvaluatedAxiom(new AsymmetricObjectPropertyAxiom(propertyToDescribe), - computeScore(total, asymmetric))); - fetchedRows += limit; - repeat = (cnt == limit); + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); } } @@ -142,7 +148,7 @@ if(total > 0){ currentlyBestAxioms.add(new EvaluatedAxiom(new AsymmetricObjectPropertyAxiom(propertyToDescribe), - computeScore(total, asymmetric))); + computeScore(total, asymmetric), declaredAsymmetric)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -33,6 +33,7 @@ import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; +import org.dllearner.core.Score; import 
org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ObjectPropertyEditor; import org.dllearner.core.owl.EquivalentObjectPropertiesAxiom; @@ -126,8 +127,11 @@ properties = new HashSet<ObjectProperty>(); properties.add(propertyToDescribe); properties.add(entry.getKey()); - evalAxiom = new EvaluatedAxiom(new EquivalentObjectPropertiesAxiom(properties), - new AxiomScore(entry.getValue() / (double)all)); + int popularity = reasoner.getPropertyCount(entry.getKey()); + int total = popularity;//Math.max(popularity, all); + int success = entry.getValue();System.out.println(entry.getKey());System.out.println(total);System.out.println(success); + Score score = computeScore(total, success); + evalAxiom = new EvaluatedAxiom(new EquivalentObjectPropertiesAxiom(properties),score); axioms.add(evalAxiom); } @@ -138,7 +142,7 @@ public static void main(String[] args) throws Exception{ EquivalentObjectPropertyAxiomLearner l = new EquivalentObjectPropertyAxiomLearner(new SparqlEndpointKS(new SparqlEndpoint( new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())));//.getEndpointDBpediaLiveAKSW())); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/country")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/thirdDriverCountry")); l.setMaxExecutionTimeInSeconds(10); l.init(); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -34,6 +34,8 @@ import com.hp.hpl.jena.query.QuerySolution; import 
com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL; @ComponentAnn(name="functional dataproperty axiom learner", shortName="dplfunc", version=0.1) @@ -43,6 +45,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=DataPropertyEditor.class) private DatatypeProperty propertyToDescribe; + + private boolean declaredAsFunctional; public FunctionalDataPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; @@ -65,35 +69,90 @@ //check if property is already declared as functional in knowledge base String query = String.format("ASK {<%s> a <%s>}", propertyToDescribe, OWL.FunctionalProperty.getURI()); - boolean declaredAsFunctional = executeAskQuery(query); + declaredAsFunctional = executeAskQuery(query); if(declaredAsFunctional) { existingAxioms.add(new FunctionalDatatypePropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as functional in knowledge base."); } - //get number of instances of s with <s p o> - query = String.format("SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", propertyToDescribe.getName()); + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get number of instances of s with <s p o> + query = String.format( + "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe.getName()); + ResultSet rs = 
executeSelectQuery(query); + QuerySolution qs; + int all = 1; + while (rs.hasNext()) { + qs = rs.next(); + all = qs.getLiteral("all").getInt(); + } + // get number of instances of s with <s p o> <s p o1> where o != o1 + query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> ?o. ?s <%s> ?o1. FILTER(?o != ?o1) }"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query); + int notFunctional = 1; + while (rs.hasNext()) { + qs = rs.next(); + notFunctional = qs.getLiteral("notfunctional").getInt(); + } + if (all > 0) { + currentlyBestAxioms.clear(); + currentlyBestAxioms.add(new EvaluatedAxiom( + new FunctionalDatatypePropertyAxiom(propertyToDescribe), + computeScore(all, all - notFunctional), + declaredAsFunctional)); + } + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + } + + private void runSPARQL1_1_Mode() { + // get number of instances of s with <s p o> + String query = String.format( + "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe.getName()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; int all = 1; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); all = qs.getLiteral("all").getInt(); } - //get number of instances of s with <s p o> <s p o1> where o != o1 + // get number of instances of s with <s p o> <s p o1> where o != o1 query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> ?o. ?s <%s> ?o1. 
FILTER(?o != ?o1) }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int notFunctional = 1; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); notFunctional = qs.getLiteral("notfunctional").getInt(); } - if(all > 0){ - currentlyBestAxioms.add(new EvaluatedAxiom(new FunctionalDatatypePropertyAxiom(propertyToDescribe), - computeScore(all, all - notFunctional), declaredAsFunctional)); + if (all > 0) { + currentlyBestAxioms.add(new EvaluatedAxiom( + new FunctionalDatatypePropertyAxiom(propertyToDescribe), + computeScore(all, all - notFunctional), + declaredAsFunctional)); } - - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -35,6 +35,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL; @ComponentAnn(name="functional objectproperty axiom learner", shortName="oplfunc", version=0.1) @@ -44,6 +46,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + + private boolean declaredAsFunctional; public FunctionalObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; @@ -66,40 +70,94 @@ //check if property is already declared as symmetric in knowledge base String query = String.format("ASK {<%s> a <%s>}", 
propertyToDescribe, OWL.FunctionalProperty.getURI()); - boolean declaredAsFunctional = executeAskQuery(query); + declaredAsFunctional = executeAskQuery(query); if(declaredAsFunctional) { existingAxioms.add(new FunctionalObjectPropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as functional in knowledge base."); } - //get number of instances of s with <s p o> - query = String.format("SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", propertyToDescribe.getName()); + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get number of instances of s with <s p o> + query = String.format( + "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe.getName()); + ResultSet rs = executeSelectQuery(query, model); + QuerySolution qs; + int all = 1; + while (rs.hasNext()) { + qs = rs.next(); + all = qs.getLiteral("all").getInt(); + } + // get number of instances of s with <s p o> <s p o1> where o != o1 + query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> ?o. ?s <%s> ?o1. 
FILTER(?o != ?o1) }"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query, model); + int notFunctional = 1; + while (rs.hasNext()) { + qs = rs.next(); + notFunctional = qs.getLiteral("notfunctional").getInt(); + } + if (all > 0) { + currentlyBestAxioms.clear(); + currentlyBestAxioms.add(new EvaluatedAxiom( + new FunctionalObjectPropertyAxiom(propertyToDescribe), + computeScore(all, all - notFunctional), + declaredAsFunctional)); + } + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + } + + private void runSPARQL1_1_Mode() { + // get number of instances of s with <s p o> + String query = String.format( + "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe.getName()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; int all = 1; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); all = qs.getLiteral("all").getInt(); } - //get number of instances of s with <s p o> <s p o1> where o != o1 + // get number of instances of s with <s p o> <s p o1> where o != o1 query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> ?o. ?s <%s> ?o1. 
FILTER(?o != ?o1) }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int notFunctional = 1; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); notFunctional = qs.getLiteral("notfunctional").getInt(); } - if(all > 0){ - currentlyBestAxioms.add(new EvaluatedAxiom(new FunctionalObjectPropertyAxiom(propertyToDescribe), - computeScore(all, all - notFunctional), declaredAsFunctional)); + if (all > 0) { + currentlyBestAxioms.add(new EvaluatedAxiom( + new FunctionalObjectPropertyAxiom(propertyToDescribe), + computeScore(all, all - notFunctional), + declaredAsFunctional)); } - - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } public static void main(String[] args) throws Exception{ - FunctionalObjectPropertyAxiomLearner l = new FunctionalObjectPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); + FunctionalObjectPropertyAxiomLearner l = new FunctionalObjectPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia())); l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/league")); l.setMaxExecutionTimeInSeconds(10); l.init(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -35,6 +35,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL; @ComponentAnn(name="inversefunctional 
objectproperty axiom learner", shortName="oplinvfunc", version=0.1) @@ -44,6 +46,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + + private boolean declaredAsInverseFunctional; public InverseFunctionalObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; @@ -66,36 +70,99 @@ //check if property is already declared as symmetric in knowledge base String query = String.format("ASK {<%s> a <%s>}", propertyToDescribe, OWL.InverseFunctionalProperty.getURI()); - boolean declaredAsInverseFunctional = executeAskQuery(query); + declaredAsInverseFunctional = executeAskQuery(query); if(declaredAsInverseFunctional) { existingAxioms.add(new InverseFunctionalObjectPropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as functional in knowledge base."); } - //get number of instances of s with <s p o> - query = String.format("SELECT (COUNT(DISTINCT ?o) AS ?all) WHERE {?s <%s> ?o.}", propertyToDescribe.getName()); + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get number of instances of s with <s p o> + query = String.format( + "SELECT (COUNT(DISTINCT ?o) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe.getName()); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + int all = 1; + while (rs.hasNext()) { + qs = rs.next(); + all = qs.getLiteral("all").getInt(); + } + // 
get number of instances of s with <s p o> <s p o1> where o != o1 + query = "SELECT (COUNT(DISTINCT ?s1) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. FILTER(?s1 != ?s2) }"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query); + int notInverseFunctional = 1; + while (rs.hasNext()) { + qs = rs.next(); + notInverseFunctional = qs.getLiteral("noninversefunctional") + .getInt(); + } + if (all > 0) { + currentlyBestAxioms.clear(); + currentlyBestAxioms + .add(new EvaluatedAxiom( + new InverseFunctionalObjectPropertyAxiom( + propertyToDescribe), computeScore(all, all + - notInverseFunctional), + declaredAsInverseFunctional)); + } + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + } + + private void runSPARQL1_1_Mode() { + // get number of instances of s with <s p o> + String query = String.format( + "SELECT (COUNT(DISTINCT ?o) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe.getName()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; int all = 1; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); all = qs.getLiteral("all").getInt(); } - //get number of instances of s with <s p o> <s p o1> where o != o1 + // get number of instances of s with <s p o> <s p o1> where o != o1 query = "SELECT (COUNT(DISTINCT ?s1) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. 
FILTER(?s1 != ?s2) }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int notInverseFunctional = 1; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); - notInverseFunctional = qs.getLiteral("noninversefunctional").getInt(); + notInverseFunctional = qs.getLiteral("noninversefunctional") + .getInt(); } - if(all > 0){ - currentlyBestAxioms.add(new EvaluatedAxiom(new InverseFunctionalObjectPropertyAxiom(propertyToDescribe), - computeScore(all, all - notInverseFunctional), declaredAsInverseFunctional)); + if (all > 0) { + currentlyBestAxioms + .add(new EvaluatedAxiom( + new InverseFunctionalObjectPropertyAxiom( + propertyToDescribe), computeScore(all, all + - notInverseFunctional), + declaredAsInverseFunctional)); } - - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } public static void main(String[] args) throws Exception{ Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -21,22 +21,15 @@ import java.net.URL; import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; import java.util.SortedSet; -import org.aksw.commons.collections.multimaps.BiHashMultimap; import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ObjectPropertyEditor; -import org.dllearner.core.owl.Individual; import 
org.dllearner.core.owl.InverseObjectPropertyAxiom; import org.dllearner.core.owl.ObjectProperty; -import org.dllearner.core.owl.SymmetricObjectPropertyAxiom; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.reasoning.SPARQLReasoner; @@ -45,6 +38,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; @ComponentAnn(name="inverse objectproperty domain axiom learner", shortName="oplinv", version=0.1) public class InverseObjectPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -91,35 +86,36 @@ } private void runSPARQL1_0_Mode(){ - Map<ObjectProperty, Integer> prop2CountMap = new HashMap<ObjectProperty, Integer>(); - boolean repeat = true; + Model model = ModelFactory.createDefaultModel(); int limit = 1000; - int total = 0; - while(!terminationCriteriaSatisfied() && repeat){ - String query = String.format("SELECT ?s ?p WHERE {?s <%s> ?o. 
OPTIONAL{?o ?p ?s.}} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, fetchedRows); + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get number of instances of s with <s p o> + query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; - ObjectProperty p; - int cnt = 0; + int total = 0; while(rs.hasNext()){ qs = rs.next(); - if(qs.getResource("p") != null){ - p = new ObjectProperty(qs.getResource("p").getURI()); - Integer oldCnt = prop2CountMap.get(p); - if(oldCnt == null){ - oldCnt = Integer.valueOf(0); - } - prop2CountMap.put(p, Integer.valueOf(oldCnt + 1)); - } - cnt++; + total = qs.getLiteral("total").getInt(); } - total += cnt; - for(Entry<ObjectProperty, Integer> entry : prop2CountMap.entrySet()){ - currentlyBestAxioms = Collections.singletonList(new EvaluatedAxiom(new InverseObjectPropertyAxiom(entry.getKey(), propertyToDescribe), - computeScore(total, entry.getValue()))); + + query = String.format("SELECT ?p (COUNT(?s) AS ?cnt) WHERE {?s <%s> ?o. 
?o ?p ?s.} GROUP BY ?p", propertyToDescribe.getName()); + rs = executeSelectQuery(query); + while(rs.hasNext()){ + qs = rs.next(); + currentlyBestAxioms.add(new EvaluatedAxiom( + new InverseObjectPropertyAxiom(new ObjectProperty(qs.getResource("p").getURI()), propertyToDescribe), + computeScore(total, qs.getLiteral("cnt").getInt()))); } - fetchedRows += limit; - repeat = (cnt == limit); + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -19,7 +19,6 @@ package org.dllearner.algorithms.properties; -import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; @@ -37,6 +36,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL2; @ComponentAnn(name="irreflexive objectproperty axiom learner", shortName="oplirrefl", version=0.1) @@ -46,6 +47,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + + private boolean declaredAsIrreflexive; public IrreflexiveObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; @@ -68,39 +71,97 @@ //check if property is already declared as irreflexive in knowledge base String query = String.format("ASK {<%s> a <%s>}", 
propertyToDescribe, OWL2.IrreflexiveProperty.getURI()); - boolean declaredAsIrreflexive = executeAskQuery(query); + declaredAsIrreflexive = executeAskQuery(query); if(declaredAsIrreflexive) { existingAxioms.add(new IrreflexiveObjectPropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as irreflexive in knowledge base."); } + + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } - //get all instance s with <s p o> - query = String.format("SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", propertyToDescribe); + + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get all instance s with <s p o> + query = String.format( + "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + int all = 0; + while (rs.hasNext()) { + qs = rs.next(); + all = qs.getLiteral("all").getInt(); + + } + + // get number of instances s where not exists <s p s> + query = "SELECT (COUNT(DISTINCT ?s) AS ?irreflexive) WHERE {?s <%s> ?o. 
FILTER(?s != ?o)}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query); + int irreflexive = 0; + while (rs.hasNext()) { + qs = rs.next(); + irreflexive = qs.getLiteral("irreflexive").getInt(); + } + + if (all > 0) { + currentlyBestAxioms.clear(); + currentlyBestAxioms.add(new EvaluatedAxiom( + new IrreflexiveObjectPropertyAxiom(propertyToDescribe), + computeScore(all, irreflexive), declaredAsIrreflexive)); + } + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + } + + private void runSPARQL1_1_Mode() { + // get all instance s with <s p o> + String query = String.format( + "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", + propertyToDescribe); ResultSet rs = executeSelectQuery(query); QuerySolution qs; int all = 0; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); all = qs.getLiteral("all").getInt(); - + } - - //get number of instances s where not exists <s p s> + + // get number of instances s where not exists <s p s> query = "SELECT (COUNT(DISTINCT ?s) AS ?irreflexive) WHERE {?s <%s> ?o. 
FILTER(?s != ?o)}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int irreflexive = 0; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); irreflexive = qs.getLiteral("irreflexive").getInt(); } - - if(all > 0){ - currentlyBestAxioms.add(new EvaluatedAxiom(new IrreflexiveObjectPropertyAxiom(propertyToDescribe), + + if (all > 0) { + currentlyBestAxioms.add(new EvaluatedAxiom( + new IrreflexiveObjectPropertyAxiom(propertyToDescribe), computeScore(all, irreflexive), declaredAsIrreflexive)); } - - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } public static void main(String[] args) throws Exception { Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -35,6 +35,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL2; @ComponentAnn(name="reflexive objectproperty axiom learner", shortName="oplrefl", version=0.1) @@ -44,6 +46,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + + private boolean declaredAsReflexive; public ReflexiveObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; @@ -66,19 +70,72 @@ //check if property is already declared as reflexive in knowledge base String query = String.format("ASK {<%s> a <%s>}", propertyToDescribe, 
OWL2.ReflexiveProperty.getURI()); - boolean declaredAsReflexive = executeAskQuery(query); + declaredAsReflexive = executeAskQuery(query); if(declaredAsReflexive) { existingAxioms.add(new ReflexiveObjectPropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as reflexive in knowledge base."); } - //get fraction of instances s with <s p s> - query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + if(ks.supportsSPARQL_1_1()){ + runSPARQL1_1_Mode(); + } else { + runSPARQL1_0_Mode(); + } + + + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); + } + + private void runSPARQL1_0_Mode() { + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get fraction of instances s with <s p s> + query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + int total = 0; + while (rs.hasNext()) { + qs = rs.next(); + total = qs.getLiteral("total").getInt(); + } + query = "SELECT (COUNT(?s) AS ?reflexive) WHERE {?s <%s> ?s.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query); + int reflexive = 0; + while (rs.hasNext()) { + qs = rs.next(); + reflexive = qs.getLiteral("reflexive").getInt(); + + } + if (total > 0) { + currentlyBestAxioms.clear(); + currentlyBestAxioms.add(new EvaluatedAxiom( + new ReflexiveObjectPropertyAxiom(propertyToDescribe), + computeScore(total, reflexive), declaredAsReflexive)); + } + + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), 
propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } + } + + private void runSPARQL1_1_Mode() { + // get fraction of instances s with <s p s> + String query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; int total = 0; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); total = qs.getLiteral("total").getInt(); } @@ -86,17 +143,16 @@ query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int reflexive = 0; - while(rs.hasNext()){ + while (rs.hasNext()) { qs = rs.next(); reflexive = qs.getLiteral("reflexive").getInt(); - + } - if(total > 0){ - currentlyBestAxioms.add(new EvaluatedAxiom(new ReflexiveObjectPropertyAxiom(propertyToDescribe), + if (total > 0) { + currentlyBestAxioms.add(new EvaluatedAxiom( + new ReflexiveObjectPropertyAxiom(propertyToDescribe), computeScore(total, reflexive), declaredAsReflexive)); } - - logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } public static void main(String[] args) throws Exception{ Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -21,15 +21,12 @@ import java.net.URL; import java.util.ArrayList; -import java.util.Collections; -import org.aksw.commons.collections.multimaps.BiHashMultimap; import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; 
import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ObjectPropertyEditor; -import org.dllearner.core.owl.Individual; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.core.owl.SymmetricObjectPropertyAxiom; import org.dllearner.kb.SparqlEndpointKS; @@ -39,6 +36,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL2; @ComponentAnn(name="symmetric objectproperty axiom learner", shortName="oplsymm", version=0.1) @@ -48,6 +47,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + + private boolean declaredAsSymmetric; public SymmetricObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; @@ -70,7 +71,7 @@ //check if property is already declared as symmetric in knowledge base String query = String.format("ASK {<%s> a <%s>}", propertyToDescribe, OWL2.SymmetricProperty.getURI()); - boolean declaredAsSymmetric = executeAskQuery(query); + declaredAsSymmetric = executeAskQuery(query); if(declaredAsSymmetric) { existingAxioms.add(new SymmetricObjectPropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as symmetric in knowledge base."); @@ -86,36 +87,42 @@ } private void runSPARQL1_0_Mode(){ - BiHashMultimap<Individual, Individual> individualsMap = new BiHashMultimap<Individual, Individual>(); - boolean repeat = true; + Model model = ModelFactory.createDefaultModel(); int limit = 1000; - while(!terminationCriteriaSatisfied() && repeat){ - String query = String.format("SELECT DISTINCT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d", propertyToDescribe.getURI().toString(), limit, fetchedRows); + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, 
propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get number of instances of s with <s p o> + query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; - Individual s; - Individual o; - int cnt = 0; + int total = 0; while(rs.hasNext()){ qs = rs.next(); - s = new Individual(qs.getResource("s").getURI()); - o = new Individual(qs.getResource("o").getURI()); - individualsMap.put(s, o); - cnt++; + total = qs.getLiteral("total").getInt(); } - int total = individualsMap.size(); + query = "SELECT (COUNT(?s) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query); int symmetric = 0; + while(rs.hasNext()){ + qs = rs.next(); + symmetric = qs.getLiteral("symmetric").getInt(); + } - for(java.util.Map.Entry<Individual, Individual> e : individualsMap.entries()){ - if(individualsMap.getInverse().containsEntry(e.getKey(), e.getValue())){ - symmetric++; - } + + if(total > 0){ + currentlyBestAxioms.clear(); + currentlyBestAxioms.add(new EvaluatedAxiom(new SymmetricObjectPropertyAxiom(propertyToDescribe), + computeScore(total, symmetric), declaredAsSymmetric)); } - - currentlyBestAxioms = Collections.singletonList(new EvaluatedAxiom(new SymmetricObjectPropertyAxiom(propertyToDescribe), - computeScore(total, symmetric))); - fetchedRows += limit; - repeat = (cnt == limit); + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); } } @@ -141,7 +148,7 @@ if(total > 0){ currentlyBestAxioms.add(new EvaluatedAxiom(new SymmetricObjectPropertyAxiom(propertyToDescribe), - computeScore(total, symmetric))); + 
computeScore(total, symmetric), declaredAsSymmetric)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -20,7 +20,6 @@ package org.dllearner.algorithms.properties; import java.util.ArrayList; -import java.util.Collections; import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; @@ -36,6 +35,8 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.vocabulary.OWL; @ComponentAnn(name="transitive objectproperty axiom learner", shortName="opltrans", version=0.1) @@ -45,6 +46,8 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; + + private boolean declaredAsTransitive; public TransitiveObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; @@ -67,13 +70,12 @@ //check if property is already declared as transitive in knowledge base String query = String.format("ASK {<%s> a <%s>}", propertyToDescribe, OWL.TransitiveProperty.getURI()); - boolean declaredAsTransitive = executeAskQuery(query); + declaredAsTransitive = executeAskQuery(query); if(declaredAsTransitive) { existingAxioms.add(new TransitiveObjectPropertyAxiom(propertyToDescribe)); logger.info("Property is already declared as transitive in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { @@ -85,7 +87,42 @@ private void runSPARQL1_0_Mode(){ - 
currentlyBestAxioms = Collections.emptyList(); + Model model = ModelFactory.createDefaultModel(); + int limit = 1000; + int offset = 0; + String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + Model newModel = executeConstructQuery(query); + while(newModel.size() != 0){ + model.add(newModel); + // get number of instances of s with <s p o> + query = "SELECT (COUNT(?o) AS ?total) WHERE {?s <%s> ?o. ?o <%s> ?o1.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + int total = 0; + while(rs.hasNext()){ + qs = rs.next(); + total = qs.getLiteral("total").getInt(); + } + query = "SELECT (COUNT(?o) AS ?transitive) WHERE {?s <%s> ?o. ?o <%s> ?o1. ?s <%s> ?o1.}"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeSelectQuery(query); + int transitive = 0; + while(rs.hasNext()){ + qs = rs.next(); + transitive = qs.getLiteral("transitive").getInt(); + } + + if(total > 0){ + currentlyBestAxioms.clear(); + currentlyBestAxioms.add(new EvaluatedAxiom(new TransitiveObjectPropertyAxiom(propertyToDescribe), + computeScore(total, transitive), declaredAsTransitive)); + } + offset += limit; + query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + newModel = executeConstructQuery(query); + } } private void runSPARQL1_1_Mode(){ @@ -109,7 +146,7 @@ if(total > 0){ currentlyBestAxioms.add(new EvaluatedAxiom(new TransitiveObjectPropertyAxiom(propertyToDescribe), - computeScore(total, transitive))); + computeScore(total, transitive), declaredAsTransitive)); } } Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-11 11:37:29 UTC (rev 3497) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-12 14:36:59 UTC (rev 3498) @@ -28,7 +28,6 @@ import java.util.SortedSet; import java.util.TreeSet; -import org.aksw.commons.jena.CollectionResultSet; import org.dllearner.core.config.BooleanEditor; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.IntegerEditor; @@ -45,8 +44,10 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.QueryExecution; +import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; /** @@ -183,6 +184,17 @@ return returnList; } + protected Model executeConstructQuery(String query) { + logger.info("Sending query\n{} ...", query); + queryExecution = new ExtendedQueryEngineHTTP(ks.getEndpoint().getURL().toString(), + query); + queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); + queryExecution.setDefaultGraphURIs(ks.getEndpoint().getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(ks.getEndpoint().getNamedGraphURIs()); + System.out.println(query); + return queryExecution.execConstruct(); + } + protected ResultSet executeSelectQuery(String query) { logger.info("Sending query\n{} ...", query); queryExecution = new ExtendedQueryEngineHTTP(ks.getEndpoint().getURL().toString(), @@ -203,6 +215,15 @@ return resultSet; } + protected ResultSet executeSelectQuery(String query, Model model) { + logger.info("Sending query\n{} ...", query); + QueryExecution qexec = QueryExecutionFactory.create(query, model); + ResultSet rs = qexec.execSelect();; + + + return rs; + } + protected v... [truncated message content] |
From: <seb...@us...> - 2011-12-11 11:37:37
|
Revision: 3497 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3497&view=rev Author: sebastianwtr Date: 2011-12-11 11:37:29 +0000 (Sun, 11 Dec 2011) Log Message: ----------- [tbsl expolration] found error in Sqlite function Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-11 09:24:58 UTC (rev 3496) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-11 11:37:29 UTC (rev 3497) @@ -31,6 +31,9 @@ * @throws IOException */ private HashMap<String,String> sendServerPropertyRequest(String vergleich, String side) throws IOException{ + + System.out.println("Resource die gesucht wird: "+ vergleich); + System.out.println("Seite die gesucht wird: "+side); /* * * For the second Iteration, I can just add the sparql property here. @@ -53,12 +56,13 @@ //System.out.println("property right!!! : " +tmp_right); String tmp_right="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {<"+vergleichorig+"> ?p ?y. 
?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; - String verarbeitungsstring=null; - if(side.contains("RIGHT")) verarbeitungsstring=tmp_right; - if(side.contains("LEFT")) verarbeitungsstring=tmp_left; + String verarbeitungsurl=null; + if(side.contains("RIGHT")) verarbeitungsurl=tmp_right; + if(side.contains("LEFT")) verarbeitungsurl=tmp_left; + System.out.println(verarbeitungsurl); //just in case..... - if(!side.contains("LEFT") && !side.contains("RIGHT")) verarbeitungsstring=tmp_left; + if(!side.contains("LEFT") && !side.contains("RIGHT")) verarbeitungsurl=tmp_left; //String verarbeitungsstring="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=PREFIX+rdfs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%0D%0APREFIX+res%3A+%3Chttp%3A%2F%2Fdbpedia.org%2Fresource%2F%3E%0D%0A%0D%0ASELECT+DISTINCT+%3Fp+%3Fl+WHERE++{%0D%0A+{+res%3A"+vergleich+"+%3Fp+%3Fo+.+}%0D%0A+UNION%0D%0A+{+%3Fs+%3Fp+res%3A"+vergleich+"+.+}%0D%0A+{+%3Fp+rdfs%3Alabel+%3Fl+.+}%0D%0A}%0D%0A&format=text%2Fhtml&debug=on&timeout="; URL url; @@ -69,7 +73,7 @@ String result=""; try { - url = new URL(verarbeitungsstring); + url = new URL(verarbeitungsurl); is = url.openStream(); isr = new InputStreamReader(is); r = new BufferedReader(isr); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-11 09:24:58 UTC (rev 3496) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-11 11:37:29 UTC (rev 3497) @@ -47,29 +47,31 @@ //global Variable dict //start counting with 0 - static int explorationdepthwordnet=1; + static int explorationdepthwordnet=2; static int iterationdepth =0; static int numberofanswers=1; - static double LvenstheinMin = 0.95; + static 
double LevenstheinMin = 0.8; static WordNet wordnet; BasicTemplator btemplator; Templator templator; -/* private static HashMap<String, String> hm = new HashMap<String, String>(); - private static HashMap<String, String> hm_new = new HashMap<String, String>();*/ private static mySQLDictionary myindex; - //Konstruktor public SparqlObject() throws MalformedURLException, ClassNotFoundException, SQLException{ wordnet = new WordNet(); System.out.println("Loading SPARQL Templator"); + // btemplator = new BasicTemplator(); + //wenn ich das nehme, dann gebe ich dem parser den ideal.... + //btemplator.UNTAGGED_INPUT = false; templator = new Templator(); System.out.println("Loading SPARQL Templator Done\n"); System.out.println("Start Indexing"); myindex = new mySQLDictionary(); System.out.println("Done:Indexing"); + + //normaly 1 setExplorationdepthwordnet(1); //eigentlich immer mit 0 initialisieren setIterationdepth(1); @@ -155,7 +157,7 @@ new InputStreamReader( new FileInputStream( "/tmp/testresult.txt" ) ) ); while( null != (s = in.readLine()) ) { - tmp=tmp+"\n"+s; + tmp+="\n"+s; } } catch( FileNotFoundException ex ) { } catch( Exception ex ) { @@ -198,7 +200,7 @@ new InputStreamReader( new FileInputStream( "/tmp/answer.txt" ) ) ); while( null != (s = in.readLine()) ) { - tmp=tmp+"\n"+s; + tmp+="\n"+s; } } catch( FileNotFoundException ex ) { } catch( Exception ex ) { @@ -221,6 +223,9 @@ * ################################################################################################# */ //Iterration 1 + /* + * Only Levensthein!!! + */ if(getIterationdepth()==1&&startIterating==true){ /* //4, because of query + three conditions for the simple case @@ -240,7 +245,7 @@ */ //Iterration 2 /* - * Only Levensthein!!! + * Only Wordnet!!! 
*/ if(getIterationdepth()==2&&startIterating==true){ if(querylist.size()==4)final_answer=simpleWordnetIteration(querylist, query); @@ -260,7 +265,7 @@ new FileInputStream( "/tmp/answer" ) ) ); String s; while( null != (s = in.readLine()) ) { - tmp=tmp+"\n"+s; + tmp+="\n"+s; } } catch( FileNotFoundException ex ) { } catch( Exception ex ) { @@ -277,7 +282,7 @@ String out=""; for(String answer : final_answer){ //only answered question - if(!answer.contains("Error in searching Wordnet with word") && !answer.contains("EmtyAnswer")&& !answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; + //if(!answer.contains("Error in searching Wordnet with word") && !answer.contains("EmtyAnswer")&& !answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; /* //only questions with wordnet error @@ -289,8 +294,8 @@ //only questions with Error in Properties // if(answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; + out+= "\n"+answer+"\n"; - } System.out.println(question); System.out.println(out); @@ -305,9 +310,8 @@ private ArrayList<String> simpleLevinstheinIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { - //asking server - String answer; - ArrayList<String> final_answer=new ArrayList<String>(); + + ArrayList<String> final_answer=new ArrayList<String>(); String resource=""; String property_to_compare_with=""; String sideOfProperty="LEFT"; @@ -334,54 +338,53 @@ } System.out.println("Property to compare:: "+ property_to_compare_with); System.out.println("Resource: "+ resource); - //contains uri AND string, every second is the string + + HashMap<String,String> properties = new HashMap<String, String>(); GetRessourcePropertys property = new GetRessourcePropertys(); Boolean goOnAfterProperty = true; + + System.out.println("URI from Resource "+ resource +": "+getUriFromIndex(resource.toLowerCase(),0)); + + //gets Propertys left or right from the resource! 
try { - /* - * TODO: Have to check now, if we need a right Property or a left one - */ properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); if (properties==null){ - //final_answer.add("Error in getting Properties\n"); final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } - //System.out.println(properties); + + System.out.println("Properties from Resource "+resource+": "+properties); + } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } if(goOnAfterProperty==true){ - //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. ArrayList<String> new_queries= new ArrayList<String>(); + //iterate over properties for (Entry<String, String> entry : properties.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - double tmp=Levenshtein.computeLevenshteinDistance(property_to_compare_with.toLowerCase(), key); + String key = entry.getKey(); + String value = entry.getValue(); + //compare property gotten from the resource with the property from the original query double nld=Levenshtein.nld(property_to_compare_with.toLowerCase(), key); - /* - * TODO: Implement Normalised levensthein - */ - //if(tmp<=3.0){ - if(nld>=LvenstheinMin){ - //alte property uri mit neuer ersetzen: - String query_tmp=query; - String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); - //query_tmp=query_tmp.replace(test,properties.get(i-1)); - query_tmp=query_tmp.replace(test,value); - new_queries.add(query_tmp); + //check if nld is greater than Levensthein + if(nld>=LevenstheinMin){ + //if its so, replace old uri with the new one + String querynew=query; + String replacement = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + 
querynew=querynew.replace(replacement,value); + System.out.println("Simple Levensthein Query: "+ querynew); + new_queries.add(querynew); } } - + + //iterate over all Queries and get answer from Server for(String anfrage : new_queries){ String answer_tmp; answer_tmp=sendServerQuestionRequest(anfrage); @@ -396,15 +399,8 @@ private ArrayList<String> complexeLevinstheinIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { - - /* - * in this case we need a inner and outer loop, because the properties are different from the resource or up side down - * Only for questions with two sets of conditions(s p o . s p o) - */ - - //asking server - String answer; - ArrayList<String> final_answer=new ArrayList<String>(); + + ArrayList<String> final_answer=new ArrayList<String>(); String resourceOne=""; String property_to_compare_withOne=""; String resourceTwo=""; @@ -416,6 +412,8 @@ int tmpcounter=0; for(String s : querylist){ //we dont need the first one, because thats the query itself + + //for condition One tmpcounter=tmpcounter+1; if(tmpcounter>=1&&tmpcounter<=4){ if(s.contains("LEFT")){ @@ -431,6 +429,8 @@ } } + + //for condition Two if(tmpcounter>4){ if(s.contains("LEFT")){ sideOfPropertyTwo="LEFT"; @@ -450,19 +450,19 @@ HashMap<String,String> propertiesTwo = new HashMap<String, String>(); GetRessourcePropertys property = new GetRessourcePropertys(); Boolean goOnAfterProperty = true; + + //Get Properties for Resource in condition One and Two from Server try { propertiesOne=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyOne); propertiesTwo=property.getPropertys(getUriFromIndex(resourceTwo.toLowerCase(),0),sideOfPropertyTwo); + if (propertiesOne==null){ - //final_answer.add("Error in getting Properties\n"); - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } - //System.out.println(properties); + } catch (IOException e) { - //e.printStackTrace(); 
final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; @@ -471,55 +471,51 @@ if(goOnAfterProperty==true){ - //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); + //Iterate over property from resource one for (Entry<String, String> entryOne : propertiesOne.entrySet()) { String queryOne=query; String keyOne = entryOne.getKey(); String valueOne = entryOne.getValue(); - //double levnstheinDistanzeOne=Levenshtein.computeLevenshteinDistance(property_to_compare_withOne.toLowerCase(), keyOne); - //if(levnstheinDistanzeOne<=3.0){ + + double levnstheinDistanzeOne=Levenshtein.nld(property_to_compare_withOne.toLowerCase(), keyOne); - - /* - * TODO: Implement Normalised levensthein - */ - //if(tmp<=3.0){ - if(levnstheinDistanzeOne>=LvenstheinMin){ + + /*if distance is higher or equals LevenstheinMin, replace old uri with new uri + * and use that new query, for the property of the second resource + */ + if(levnstheinDistanzeOne>=LevenstheinMin){ String test = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); queryOne=queryOne.replace(test,valueOne); } + /* + * Iterate now over the second set of properties, but this time not using the original query in which + * to replace the old uri with the new one, but using queryOne from the first step. 
+ */ for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { String keyTwo = entryTwo.getKey(); String valueTwo = entryTwo.getValue(); - // double levnstheinDistanzeTwo=Levenshtein.computeLevenshteinDistance(property_to_compare_withTwo.toLowerCase(), keyTwo); - - /* - * TODO: Implement Normalised levensthein - */ - //if(levnstheinDistanzeTwo<=3.0){ - double levnstheinDistanzeTwo=Levenshtein.nld(property_to_compare_withTwo.toLowerCase(), keyTwo); + + //again calculate the nld with the property from the second condition and the property from the propertyset + double levnstheinDistanzeTwo=Levenshtein.nld(property_to_compare_withTwo.toLowerCase(), keyTwo); - /* - * TODO: Implement Normalised levensthein - */ - //if(tmp<=3.0){ - if(levnstheinDistanzeTwo>=0.9){ - //alte property uri mit neuer ersetzen: - String queryTwo=queryOne; - String test = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); - queryTwo=queryTwo.replace(test,valueTwo); - new_queries.add(queryTwo); - } - + if(levnstheinDistanzeTwo>LevenstheinMin){ + String queryTwo=queryOne; + String replacement = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); + queryTwo=queryTwo.replace(replacement,valueTwo); + System.out.println("Complex Levensthein Query: "+ queryTwo); + new_queries.add(queryTwo); + } + } } - + //iterate over all Queries and get answer from Server for(String anfrage : new_queries){ String answer_tmp; answer_tmp=sendServerQuestionRequest(anfrage); @@ -534,16 +530,9 @@ private ArrayList<String> simpleWordnetIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { - //asking server - String answer; ArrayList<String> final_answer=new ArrayList<String>(); - /* - * First try the original query on the server. 
If that doesnt work, try it with Iteration - */ - answer=sendServerQuestionRequest(query); - - if(answer.contains("EmtyAnswer")){ + System.out.println("In simpleWordnetIteration"); String resource=""; String property_to_compare_with=""; @@ -571,28 +560,27 @@ } System.out.println("Property to compare:: "+ property_to_compare_with); System.out.println("Resource: "+ resource); - //contains uri AND string, every second is the string + + HashMap<String,String> properties = new HashMap<String, String>(); GetRessourcePropertys property = new GetRessourcePropertys(); Boolean goOnAfterProperty = true; try { properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); if (properties==null){ - //final_answer.add("Error in getting Properties\n"); - + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } - //System.out.println(properties); + } catch (IOException e) { - //e.printStackTrace(); final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } if(goOnAfterProperty==true){ - //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); @@ -600,6 +588,7 @@ ArrayList<String> tmp_semantics=new ArrayList<String>(); ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); semantics.add(property_to_compare_with); + System.out.println("Semantics: "+ semantics); //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child String _temp_=myindex.getWordnetHelp(property_to_compare_with); @@ -611,11 +600,21 @@ semantics.add(_temp_); tmp_semantics=semantics; } + + System.out.println("tmp_semantics: "+ tmp_semantics); Boolean goOnAfterWordnet = true; + + + System.out.println("##########################"); + System.out.println("properties for "+getUriFromIndex(resource.toLowerCase(),0)+": "+properties); + System.out.println("Property to compare with: "+property_to_compare_with); + System.out.println("Semantics: "+semantics); + System.out.println("##########################"); for(int i=0;i<=explorationdepthwordnet;i++){ try { tmp_semantics=getSemantics(tmp_semantics); + System.out.println("tmp_semantics in Iteration: "+ tmp_semantics); if (tmp_semantics==null){ goOnAfterWordnet=false; final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); @@ -629,8 +628,7 @@ } } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); + goOnAfterWordnet=false; final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); @@ -646,19 +644,23 @@ for(String b : semantics){ if(key.contains(b.toLowerCase())){ + System.out.println("Hey, Iam in too!!!!!!!!!!!"); + //to check, if no property is used twice... 
if(!result_SemanticsMatchProperties.contains(key)){ //create new query - result_SemanticsMatchProperties.add(key); + System.out.println("Hey, Iam in!!!!!!!!!!!"); + result_SemanticsMatchProperties.add(key); String query_tmp=query; String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); query_tmp=query_tmp.replace(test,value); - System.out.println("New query after wordnet: "+ query_tmp); + System.out.println("Simple Wordnet Query: "+ query_tmp); new_queries.add(query_tmp); } } } } + //iterate over all Queries and get answer from Server for(String bla : new_queries){ String answer_tmp; answer_tmp=sendServerQuestionRequest(bla); @@ -667,25 +669,16 @@ } } } - } + return final_answer; } private ArrayList<String> complexWordnetIteration(ArrayList<String> querylist, String query) throws SQLException, JWNLException { - //asking server - String answer; ArrayList<String> final_answer=new ArrayList<String>(); - /* - * First try the original query on the server. If that doesnt work, try it with Iteration - */ - answer=sendServerQuestionRequest(query); - - if(answer.contains("EmtyAnswer")){ - String resourceOne=""; String property_to_compare_withOne=""; String resourceTwo=""; @@ -698,6 +691,7 @@ for(String s : querylist){ //we dont need the first one, because thats the query itself tmpcounter=tmpcounter+1; + //get resource and property from the first condtion if(tmpcounter>=1&&tmpcounter<=4){ if(s.contains("LEFT")){ sideOfPropertyOne="LEFT"; @@ -712,6 +706,7 @@ } } + //get resource and property from the second condtion if(tmpcounter>4){ if(s.contains("LEFT")){ sideOfPropertyTwo="LEFT"; @@ -729,24 +724,24 @@ } System.out.println("Property to compare:: "+ property_to_compare_withOne); System.out.println("Resource: "+ resourceOne); + HashMap<String,String> propertiesOne = new HashMap<String, String>(); HashMap<String,String> propertiesTwo = new HashMap<String, String>(); GetRessourcePropertys property = new GetRessourcePropertys(); Boolean goOnAfterProperty = 
true; + + //gets the properties for both conditions try { propertiesOne=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyOne); propertiesTwo=property.getPropertys(getUriFromIndex(resourceTwo.toLowerCase(),0),sideOfPropertyTwo); if (propertiesOne==null){ - //final_answer.add("Error in getting Properties\n"); final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } - //System.out.println(properties); + } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; @@ -756,7 +751,7 @@ /* * #################################### Semantics One############################################# */ - //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); //System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); @@ -775,6 +770,8 @@ semanticsOne.add(_temp_One); tmp_semanticsOne=semanticsOne; } + + //get the "semantics" from wordnet. Iterate as long as the explorationdepthwordnet is reached Boolean goOnAfterWordnet = true; for(int i=0;i<=explorationdepthwordnet;i++){ @@ -793,8 +790,7 @@ } } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); + goOnAfterWordnet=false; final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); @@ -823,6 +819,7 @@ tmp_semanticsTwo=semanticsTwo; } + //get the "semantics" from wordnet. 
Iterate as long as the explorationdepthwordnet is reached for(int i=0;i<=explorationdepthwordnet;i++){ try { @@ -840,8 +837,7 @@ } } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); + goOnAfterWordnet=false; final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); @@ -853,6 +849,8 @@ if(goOnAfterWordnet==true){ + + //start iterating over the propery sets for (Entry<String, String> entryOne : propertiesOne.entrySet()) { String keyOne = entryOne.getKey(); String valueOne = entryOne.getValue(); @@ -878,7 +876,7 @@ String queryTwo=queryOne; String replacementTwo = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); queryTwo=queryTwo.replace(replacementTwo,valueTwo); - System.out.println("New query after wordnet: "+ queryTwo); + System.out.println("Complexe Wordnet Query: "+ queryTwo); new_queries.add(queryTwo); } } @@ -895,7 +893,7 @@ - + //iterate over all Queries and get answer from Server for(String bla : new_queries){ String answer_tmp; answer_tmp=sendServerQuestionRequest(bla); @@ -904,7 +902,6 @@ } } } - } return final_answer; } @@ -1033,8 +1030,8 @@ - /*System.out.println("Conditions: " + conditions); - System.out.println("Conditions_new: " + conditions_new);*/ + System.out.println("Conditions: " + conditions); + System.out.println("Conditions_new: " + conditions_new); String query_upside_down = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions_new.replace("--","") +filters+ "}"; @@ -1163,14 +1160,33 @@ */ private String getUriFromIndex(String string, int fall) throws SQLException{ String result=null; + String tmp1=null; + String tmp2 = null; //just to be sure its only 0 or 1 if(fall!=0 && fall!=1) fall=0; if(fall==0){ - result=myindex.getResourceURI(string.toLowerCase()); + + //first try: take always the ontology if existing and not the Resource + tmp1=myindex.getResourceURI(string.toLowerCase()); + 
tmp2=myindex.getontologyClassURI(string.toLowerCase()); + System.out.println("URI from resource: "+tmp1); + System.out.println("URI from ontologyClass: "+tmp2); + + + System.out.println("value from http://dbpedia.org/resource/WikiLeaks : "+ myindex.getResourceURI("http://dbpedia.org/resource/WikiLeaks")); + System.out.println("value from author : "+ myindex.getResourceURI("author")); + if(tmp1!=null && tmp2!=null) result=tmp2; + if(tmp1!=null && tmp2==null) result=tmp1; + if(tmp1==null && tmp2!=null) result=tmp2; + //result=myindex.getResourceURI(string.toLowerCase()); if(result==null)result=myindex.getPropertyURI(string.toLowerCase()); } if(fall==1){ - result=myindex.getPropertyURI(string.toLowerCase()); + tmp1=myindex.getPropertyURI(string.toLowerCase()); + tmp2=myindex.getontologyURI(string.toLowerCase()); + if(tmp1!=null && tmp2!=null) result=tmp2; + if(tmp1!=null && tmp2==null) result=tmp1; + if(tmp1==null && tmp2!=null) result=tmp2; if(result==null){ result=myindex.getResourceURI(string.toLowerCase()); if(result!=null) result=result.replace("resource", "property"); @@ -1184,10 +1200,11 @@ if(result==null) { if(fall==1)return "http://dbpedia.org/property/"+tmp; if(fall==0)return "http://dbpedia.org/resource/"+tmp; - else{ + else{ + System.out.println("return result: "+result); return result; - } } + } else return result; } @@ -1198,22 +1215,62 @@ * TODO: if for example title,name,label is given, replace , and get for each thing the semantics * */ - private static ArrayList<String> getSemantics (ArrayList<String> semantics) throws IOException, JWNLException { + private static ArrayList<String> getSemantics (ArrayList<String> semanticsOrig) throws IOException, JWNLException { ArrayList<String> result = new ArrayList<String>(); - //result.clear(); - //try{ - try{ + + //System.out.println("in function get Semantics!"); + + ArrayList<String> semantics = new ArrayList<String>(); + semantics=semanticsOrig; + /*//check out, if in the semantics are still terms, with _ 
or , + //if so, split on _ and , and add them to the semantic list + for(String id :semanticsOrig){ + if(id.contains("_")){ + System.out.println("in _"); + String[] tmp=id.split("_"); + for(String i: tmp) if(!semantics.contains(i))semantics.add(i); + + //and also add a term without _ + if(!semantics.contains(id.replace("_"," ")))semantics.add(id.replace("_"," ")); + //remove old id + //semantics.remove(id); + } + if(id.contains(",")){ + System.out.println("in ,"); + String[] tmp=id.split(","); + for(String i: tmp) if(!semantics.contains(i))semantics.add(i); + //semantics.remove(id); + } + }*/ + for(String id :semantics){ + //System.out.println("in String id : semantics"); + //System.out.println("ID :"+id); + + //add id also to the result, if its not already in there + if(!result.contains(id))result.add(id); List<String> array_relatedNouns=null; List<String> array_bestsynonyms=null; - List<String> array_siterterms=null; - //array.clear(); - System.out.println("Wordnet Word: "+id); + + System.out.println("Wordnet Word: "+id); + try{ array_relatedNouns =wordnet.getRelatedNouns(id); + } + catch(Exception e){ + //array_relatedNouns.clear(); + } + System.out.println("array_relatedNouns: "+ array_relatedNouns); + //System.out.println("after relatedNouns"); + try{ array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, id); + System.out.println("array_bestsynonyms: "+ array_bestsynonyms); + } + catch(Exception e){ + // + } + - array_siterterms=wordnet.getSisterTerms(POS.NOUN, id); if(array_relatedNouns!=null){ for(String i:array_relatedNouns){ @@ -1225,20 +1282,11 @@ if(!result.contains(i))result.add(i); } } - if(array_siterterms!=null){ - for(String i:array_siterterms){ - if(!result.contains(i))result.add(i); - } - } } - } - catch(Exception e){ - if(result.isEmpty()) return null; - } - + if(!result.isEmpty()) return result; else{ //System.out.println("Didnt find ") @@ -1248,20 +1296,33 @@ */ try{ for(String id :semantics){ + //System.out.println("in String id : semantics 
TWO"); String[] tmp_array=id.split(" "); + //System.out.println("ID TWO:"+id); if(tmp_array.length>=2){ - for(String tmp : tmp_array){ + for(String advanced_id : tmp_array){ List<String> array_relatedNouns=null; List<String> array_bestsynonyms=null; - List<String> array_siterterms=null; - //array.clear(); - //System.out.println("Wordnet Word: "+tmp); - array_relatedNouns =wordnet.getRelatedNouns(tmp); - - array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, tmp); + //add id also to the result, if its not already in there + if(!result.contains(advanced_id))result.add(advanced_id); + + try{ + array_relatedNouns =wordnet.getRelatedNouns(advanced_id); + } + catch(Exception e){ + //array_relatedNouns.clear(); + } + System.out.println("array_relatedNouns: "+ array_relatedNouns); + //System.out.println("after relatedNouns"); + + try{ + array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, advanced_id); + System.out.println("array_bestsynonyms: "+ array_bestsynonyms); + } + catch(Exception e){ + // + } - array_siterterms=wordnet.getSisterTerms(POS.NOUN, tmp); - if(array_relatedNouns!=null){ for(String i:array_relatedNouns){ if(!result.contains(i))result.add(i); @@ -1272,11 +1333,7 @@ if(!result.contains(i))result.add(i); } } - if(array_siterterms!=null){ - for(String i:array_siterterms){ - if(!result.contains(i))result.add(i); - } - } + } } @@ -1284,7 +1341,7 @@ } } catch(Exception e){ - if(result.isEmpty()) return null; + if(result.isEmpty()) return semanticsOrig; } } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java 2011-12-11 09:24:58 UTC (rev 3496) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java 2011-12-11 11:37:29 UTC (rev 3497) @@ -22,6 +22,8 @@ createIndexPropertys(); 
createIndexResource(); createWordnetHelp(); + createIndexOntology(); + createIndexoOntologyClass(); //optional!! //createIndexWikipedia(); @@ -57,6 +59,38 @@ } + public String getontologyURI(String string) throws SQLException{ + Statement stat = conn.createStatement(); + ResultSet rs; + try { + rs = stat.executeQuery("select uri from ontology where name='"+string.toLowerCase()+"';"); + return rs.getString("uri"); + } catch (Exception e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + return null; + } + + + } + + public String getontologyClassURI(String string) throws SQLException{ + Statement stat = conn.createStatement(); + ResultSet rs; + try { + rs = stat.executeQuery("select uri from ontologyClass where name='"+string.toLowerCase()+"';"); + return rs.getString("uri"); + } catch (Exception e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + return null; + } + + + } + + + public String getWikipediaURI(String string) throws SQLException{ Statement stat = conn.createStatement(); ResultSet rs; @@ -197,40 +231,6 @@ prep.executeBatch(); conn.setAutoCommit(true); System.out.println("Done"); - //Statement stat = conn.createStatement(); - /* long start = System.currentTimeMillis(); - // zu messender Code - - ResultSet rs = stat.executeQuery("select * from people where name='kornyval';"); - while (rs.next()) - { - System.out.println("name = " + rs.getString("name")); - System.out.println("job = " + rs.getString("occupation")); - } - System.out.println("Duration in ms: " + (System.currentTimeMillis() - start)); - - start = System.currentTimeMillis(); - // zu messender Code - - rs = stat.executeQuery("select * from people where name='barack obama';"); - while (rs.next()) - { - System.out.println("name = " + rs.getString("name")); - System.out.println("job = " + rs.getString("occupation")); - } - System.out.println("Duration in ms: " + (System.currentTimeMillis() - start)); - - rs = stat.executeQuery("select * from people where 
name='kornyval';"); - while (rs.next()) - { - System.out.println("name = " + rs.getString("name")); - System.out.println("job = " + rs.getString("occupation")); - } - System.out.println("Duration in ms: " + (System.currentTimeMillis() - start)); - - - rs.close();*/ - // conn.close(); } private void createIndexPropertys() throws ClassNotFoundException, SQLException{ /*System.out.println("Start SQL test"); @@ -252,8 +252,8 @@ while( null != (s = in.readLine()) ) { String[] tmp_array =s.split(":::"); if(tmp_array.length>=2){ - prep.setString(1, tmp_array[1]); - prep.setString(2, tmp_array[0]); + prep.setString(1, tmp_array[0]); + prep.setString(2, tmp_array[1]); prep.addBatch(); zaehler=zaehler+1; //if(zaehler%10000==0) System.out.println(zaehler); @@ -303,8 +303,8 @@ while( null != (s = in.readLine()) ) { String[] tmp_array =s.split(":::"); if(tmp_array.length>=2){ - prep.setString(1, tmp_array[1]); - prep.setString(2, tmp_array[0]); + prep.setString(1, tmp_array[0]); + prep.setString(2, tmp_array[1]); prep.addBatch(); zaehler=zaehler+1; // if(zaehler%10000==0) System.out.println(zaehler); @@ -312,7 +312,7 @@ conn.setAutoCommit(false); prep.executeBatch(); conn.setAutoCommit(false); - System.out.println("done"); + System.out.println("done"+zaehler); } } @@ -334,7 +334,113 @@ prep.executeBatch(); conn.setAutoCommit(true); System.out.println("Done"); + + } +private void createIndexOntology() throws ClassNotFoundException, SQLException{ + /*System.out.println("Start SQL test");*/ + System.out.println("start indexing Ontology"); + Statement stat = conn.createStatement(); + stat.executeUpdate("drop table if exists ontology;"); + stat.executeUpdate("create table ontology (name, uri);"); + PreparedStatement prep = conn.prepareStatement("insert into ontology values (?, ?);"); + BufferedReader in=null; + // conn.setAutoCommit(false); + int zaehler=0; + try { + in = new BufferedReader( + new InputStreamReader( + new FileInputStream( "/home/swalter/workspace/ontology" ) ) ); + 
String s; + while( null != (s = in.readLine()) ) { + String[] tmp_array =s.split(":::"); + if(tmp_array.length>=2){ + prep.setString(1, tmp_array[0]); + prep.setString(2, tmp_array[1]); + prep.addBatch(); + zaehler=zaehler+1; + // if(zaehler%10000==0) System.out.println(zaehler); + if(zaehler%1000000==0){ + conn.setAutoCommit(false); + prep.executeBatch(); + conn.setAutoCommit(false); + System.out.println("done" + zaehler); + } + } + } + } catch( FileNotFoundException ex ) { + } catch( Exception ex ) { + System.out.println( ex ); + } finally { + if( in != null ) + try { + in.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + conn.setAutoCommit(false); + prep.executeBatch(); + conn.setAutoCommit(true); + System.out.println("Done"); + + } + +private void createIndexoOntologyClass() throws ClassNotFoundException, SQLException{ + /*System.out.println("Start SQL test");*/ + System.out.println("start indexing ontologyClass"); + Statement stat = conn.createStatement(); + stat.executeUpdate("drop table if exists ontologyClass;"); + stat.executeUpdate("create table ontologyClass (name, uri);"); + PreparedStatement prep = conn.prepareStatement("insert into ontologyClass values (?, ?);"); + BufferedReader in=null; + // conn.setAutoCommit(false); + int zaehler=0; + try { + in = new BufferedReader( + new InputStreamReader( + new FileInputStream( "/home/swalter/workspace/ontologyClass" ) ) ); + String s; + while( null != (s = in.readLine()) ) { + String[] tmp_array =s.split(":::"); + if(tmp_array.length>=2){ + prep.setString(1, tmp_array[0]); + prep.setString(2, tmp_array[1]); + prep.addBatch(); + zaehler=zaehler+1; + // if(zaehler%10000==0) System.out.println(zaehler); + if(zaehler%1000000==0){ + conn.setAutoCommit(false); + prep.executeBatch(); + conn.setAutoCommit(false); + System.out.println("done" + zaehler); + } + + } + } + } catch( FileNotFoundException ex ) { + } catch( Exception ex ) { + System.out.println( ex ); 
+ } finally { + if( in != null ) + try { + in.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + conn.setAutoCommit(false); + prep.executeBatch(); + conn.setAutoCommit(true); + System.out.println("Done"); + + } + + } \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-11 09:25:04
|
Revision: 3496 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3496&view=rev Author: sebastianwtr Date: 2011-12-11 09:24:58 +0000 (Sun, 11 Dec 2011) Log Message: ----------- [tbsl] changed private boolean UNTAGGED_INPUT = true; to public boolean UNTAGGED_INPUT = true; Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java 2011-12-09 13:52:45 UTC (rev 3495) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java 2011-12-11 09:24:58 UTC (rev 3496) @@ -32,7 +32,7 @@ Preprocessor pp; boolean ONE_SCOPE_ONLY = true; - boolean UNTAGGED_INPUT = true; + public boolean UNTAGGED_INPUT = true; public BasicTemplator() { List<InputStream> grammarFiles = new ArrayList<InputStream>(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-12-09 13:52:52
|
Revision: 3495 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3495&view=rev Author: jenslehmann Date: 2011-12-09 13:52:45 +0000 (Fri, 09 Dec 2011) Log Message: ----------- fixes for enrichment script Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java trunk/components-core/src/main/java/org/dllearner/utilities/Files.java trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/Start.java trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java trunk/scripts/src/main/java/org/dllearner/examples/Heart.java trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java 
trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -489,7 +489,7 @@ if (replaceSearchTree) Files.createFile(new File(searchTreeFile), treeString); else - Files.appendFile(new File(searchTreeFile), treeString); + Files.appendToFile(new File(searchTreeFile), treeString); } // System.out.println(loop); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -507,7 +507,7 @@ if (replaceSearchTree) Files.createFile(new File(searchTreeFile), treeString); else - Files.appendFile(new File(searchTreeFile), treeString); + Files.appendToFile(new File(searchTreeFile), treeString); } // System.out.println(loop); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -475,7 +475,7 @@ if (replaceSearchTree) Files.createFile(searchTreeFile, treeString); else - Files.appendFile(searchTreeFile, 
treeString); + Files.appendToFile(searchTreeFile, treeString); } // Anzahl Schleifendurchläufe Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -521,7 +521,7 @@ if(replaceSearchTree) Files.createFile(searchTreeFile, treeString); else - Files.appendFile(searchTreeFile, treeString); + Files.appendToFile(searchTreeFile, treeString); }//write search tree // Anzahl Schleifendurchläufe Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -357,7 +357,7 @@ File jamonlog = new File("log/jamon.html"); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n" + Files.appendToFile(jamonlog, "<xmp>\n" + JamonMonitorLogger.getStringForAllSortedByLabel()); System.exit(0); } Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -245,7 +245,7 @@ Files.createFile(f, s + "\n"); logDeletedOnStart = true; } else { - Files.appendFile(f, s + "\n"); + Files.appendToFile(f, s + "\n"); } } Modified: 
trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -176,7 +176,7 @@ osw.close(); if(protocolFile != null) - Files.appendFile(protocolFile, "DIG code send to reasoner:\n\n"+send+"\n\n"); + Files.appendToFile(protocolFile, "DIG code send to reasoner:\n\n"+send+"\n\n"); // receive answer InputStream is = connection.getInputStream(); @@ -198,7 +198,7 @@ // } if(protocolFile != null) - Files.appendFile(protocolFile, "DIG code received from reasoner:\n\n"+answer+"\n\n"); + Files.appendToFile(protocolFile, "DIG code received from reasoner:\n\n"+answer+"\n\n"); return answer.toString(); } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/Files.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/Files.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/utilities/Files.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -186,7 +186,7 @@ * @param content * Content of the file. 
*/ - public static void appendFile(File file, String content) { + public static void appendToFile(File file, String content) { try { FileOutputStream fos = new FileOutputStream(file, true); fos.write(content.getBytes()); Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -168,7 +168,7 @@ public static void writeHTMLReport(String filename){ File jamonlog = new File(filename); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); + Files.appendToFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -275,7 +275,7 @@ private void outputWriter(String output) { if(writeToFile) { - Files.appendFile(outputFile, output +"\n"); + Files.appendToFile(outputFile, output +"\n"); System.out.println(output); } else { System.out.println(output); Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -199,7 +199,7 @@ // restrict tested number of entities per 
type (only for testing purposes); // should be set to -1 in production mode - private int maxEntitiesPerType = -1; + int maxEntitiesPerType = -1; // number of axioms which will be learned/considered (only applies to // some learners) @@ -583,8 +583,11 @@ OWLNamedIndividual knowldegeBaseInd = f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getURL())); ax = f.getOWLClassAssertionAxiom(EnrichmentVocabulary.SPARQLEndpoint, knowldegeBaseInd); axioms.add(ax); - ax = f.getOWLObjectPropertyAssertionAxiom(EnrichmentVocabulary.defaultGraph, knowldegeBaseInd, f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getDefaultGraphURIs().iterator().next()))); - axioms.add(ax); + if(!ks.getEndpoint().getDefaultGraphURIs().isEmpty()) { + // TODO: only writes one default graph + ax = f.getOWLObjectPropertyAssertionAxiom(EnrichmentVocabulary.defaultGraph, knowldegeBaseInd, f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getDefaultGraphURIs().iterator().next()))); + axioms.add(ax); + } ax = f.getOWLObjectPropertyAssertionAxiom(EnrichmentVocabulary.hasInput, algorithmRunInd, knowldegeBaseInd); axioms.add(ax); Modified: trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -22,6 +22,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.PrintStream; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.URL; @@ -31,6 +32,8 @@ import java.util.Map; import java.util.Map.Entry; +import javax.xml.ws.http.HTTPException; + import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; @@ -41,11 +44,14 @@ import 
org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.kb.sparql.SparqlQuery; +import org.dllearner.utilities.Files; import org.semanticweb.owlapi.model.OWLAxiom; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; +import com.hp.hpl.jena.sparql.resultset.ResultSetException; /** * Enriches all of the LOD cloud. @@ -124,19 +130,49 @@ // run enrichment SparqlEndpoint se = endpoint.getValue(); String name = endpoint.getKey(); + + File f = new File(baseDir + name + ".ttl"); + File log = new File(baseDir + name + ".log"); + System.out.println("Enriching " + name + " using " + se); Enrichment e = new Enrichment(se, null, threshold, nrOfAxiomsToLearn, useInference, false); - e.start(); - // save results to a file - SparqlEndpointKS ks = new SparqlEndpointKS(se); - List<AlgorithmRun> runs = e.getAlgorithmRuns(); - List<OWLAxiom> axioms = new LinkedList<OWLAxiom>(); - for(AlgorithmRun run : runs) { - axioms.addAll(e.toRDF(run.getAxioms(), run.getAlgorithm(), run.getParameters(), ks)); + + e.maxEntitiesPerType = 3; // hack for faster testing of endpoints + + boolean success = false; + // run enrichment script - we make a case distinguish to see which kind of problems we get + // (could be interesting for statistics later on) + try { + e.start(); + success = true; + } catch(StackOverflowError error) { + error.printStackTrace(new PrintStream(log)); + Files.appendToFile(log, "stack overflows could be caused by cycles in class hierarchies"); + error.printStackTrace(); + } catch(ResultSetException ex) { + ex.printStackTrace(new PrintStream(log)); + Files.appendToFile(log, ex.getMessage()); + ex.printStackTrace(); + } catch(QueryExceptionHTTP ex) { + ex.printStackTrace(new PrintStream(log)); + Files.appendToFile(log, ex.getMessage()); + ex.printStackTrace(); + } +// catch(Exception ex) { +// 
System.out.println("class of exception: " + ex.getClass()); +// } + + // save results to a file (TODO: check if enrichment format + if(success) { + SparqlEndpointKS ks = new SparqlEndpointKS(se); + List<AlgorithmRun> runs = e.getAlgorithmRuns(); + List<OWLAxiom> axioms = new LinkedList<OWLAxiom>(); + for(AlgorithmRun run : runs) { + axioms.addAll(e.toRDF(run.getAxioms(), run.getAlgorithm(), run.getParameters(), ks)); + } + Model model = e.getModel(axioms); + model.write(new FileOutputStream(f), "TURTLE"); } - Model model = e.getModel(axioms); - File f = new File(baseDir + name + ".ttl"); - model.write(new FileOutputStream(f), "TURTLE"); } } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Start.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Start.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Start.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -216,7 +216,7 @@ // write JaMON report in HTML file File jamonlog = new File("log/jamon.html"); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); + Files.appendToFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); } /** Modified: trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -766,7 +766,7 @@ content.append("-\"" + getIndividual(key) + "\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static void generateConfFile(File file) { @@ -778,7 +778,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += 
"refexamples.searchTreeFile = \"log/alzheimer/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } // returns URI including quotationsmark (need for KBparser) Modified: trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -296,7 +296,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/breasttissue/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file, HashMap<String, Integer> patients, int i) { @@ -311,7 +311,7 @@ } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -268,7 +268,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/carcinogenesis/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(confTrainFile, confHeader); + Files.appendToFile(confTrainFile, confHeader); // generating training examples File trainingFilePositives = new File(prologDirectory + "train.f"); @@ -291,7 +291,7 @@ appendNegExamples(confTrainFile, negPTE1Examples); if(createPTE1Conf) { Files.clearFile(confPTE1File); - 
Files.appendFile(confPTE1File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); + Files.appendToFile(confPTE1File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); appendPosExamples(confPTE1File, posPTE1Examples); appendNegExamples(confPTE1File, negPTE1Examples); } @@ -300,8 +300,8 @@ if(createPTE2Conf) { File confPTE2File = new File("examples/carcinogenesis/testpte2.conf"); Files.clearFile(confPTE2File); - Files.appendFile(confPTE2File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); - Files.appendFile(confPTE2File, getPTE2Examples()); + Files.appendToFile(confPTE2File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); + Files.appendToFile(confPTE2File, getPTE2Examples()); } } @@ -492,7 +492,7 @@ else content.append("-\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } public static void appendNegExamples(File file, List<Individual> examples) { @@ -503,7 +503,7 @@ else content.append("+\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getAtomClass(String element, String atomType) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -546,7 +546,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/cardiotocography/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file, HashMap<String, Integer> patients, int i) { @@ -561,7 +561,7 @@ } } - 
Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Heart.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Heart.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Heart.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -307,7 +307,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/heart/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file) { @@ -321,7 +321,7 @@ content.append("-\"" + getIndividual(key) + "\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -255,7 +255,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/mammographic/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file) { @@ -269,7 +269,7 @@ content.append("-\"" + getIndividual(key) + "\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java 
=================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -361,61 +361,61 @@ String pt = rs.getString("phenotype"); if ((pt.toLowerCase().contains("polymorphism"))&&( neg_count<=negEx )) { if (writeAlephFiles) { - Files.appendFile(badFile, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(badFile, "deleterious(id"+mutationID+").\n"); } if (writeYYFiles) { - Files.appendFile(yybadFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); + Files.appendToFile(yybadFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); } if (cvAleph){ switch (kn) { case 1: aneg++; - Files.appendFile(split1n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split1n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 2: aneg++; - Files.appendFile(split2n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split2n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 3: aneg++; - Files.appendFile(split3n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split3n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 4: aneg++; - Files.appendFile(split4n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split4n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 5: aneg++; - Files.appendFile(split5n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split5n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 6: aneg++; - Files.appendFile(split6n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split6n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 7: aneg++; - 
Files.appendFile(split7n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split7n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 8: aneg++; - Files.appendFile(split8n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split8n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 9: aneg++; - Files.appendFile(split9n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split9n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 10: aneg++; - Files.appendFile(split10n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split10n, "deleterious(id"+mutationID+").\n"); // if (aneg == neps) {aneg = 0; kn++;} break; // case 11: // without comment its round negExamples / 10 @@ -427,61 +427,61 @@ } if ((!pt.toLowerCase().contains("polymorphism"))&& ( pos_count<=posEx)) { if (writeAlephFiles) { - Files.appendFile(posFile, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(posFile, "deleterious(id"+mutationID+").\n"); } if (writeYYFiles) { - Files.appendFile(yyposFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); + Files.appendToFile(yyposFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); } if (cvAleph){ switch (kp) { case 1: apos++; - Files.appendFile(split1f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split1f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 2: apos++; - Files.appendFile(split2f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split2f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 3: apos++; - Files.appendFile(split3f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split3f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 4: apos++; - Files.appendFile(split4f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split4f, 
"deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 5: apos++; - Files.appendFile(split5f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split5f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 6: apos++; - Files.appendFile(split6f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split6f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 7: apos++; - Files.appendFile(split7f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split7f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 8: apos++; - Files.appendFile(split8f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split8f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 9: apos++; - Files.appendFile(split9f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split9f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 10: apos++; - Files.appendFile(split10f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split10f, "deleterious(id"+mutationID+").\n"); // if (apos == peps) {apos = 0; kp++;} break; // case 11: // without comments its round to posExamples / 10 @@ -712,7 +712,7 @@ confHeader += "\n"; } - Files.appendFile(confFile, confHeader); + Files.appendToFile(confFile, confHeader); if(!generatePosExampleClass) { MonogenicDiseases.appendPosExamples(confFile, posExamples); MonogenicDiseases.appendNegExamples(confFile, negExamples); @@ -751,7 +751,7 @@ else content.append("-\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } public static void appendNegExamples(File file, List<Individual> examples) { @@ -762,6 +762,6 @@ else content.append("+\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } } 
Modified: trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -243,7 +243,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/mutagenesis/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generatePositiveExamples(String fileName) @@ -439,7 +439,7 @@ else content.append("-\"" + example.toString() + "\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } public static void appendNegExamples(File file, List<Individual> examples) { @@ -450,7 +450,7 @@ else content.append("+\"" + example.toString() + "\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getAtomClass(String element, String atomType) { @@ -583,7 +583,7 @@ for (String negEx : negativeExamples) { content.append("-\"" + getIndividual(negEx) + "\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static BooleanDatatypePropertyAssertion getBooleanDatatypePropertyAssertion( Modified: trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -283,7 +283,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/parkinsons/searchTree.log\";\n"; 
confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file) { @@ -298,7 +298,7 @@ } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -171,7 +171,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/suramin/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(confTrainFile, confHeader); + Files.appendToFile(confTrainFile, confHeader); appendExamples(confTrainFile, posExamples); } @@ -382,7 +382,7 @@ content.append("-\""+getIndividual(compound.toString())+"\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -374,7 +374,7 @@ private void outputWriter(String output) { if(writeToFile) { - Files.appendFile(outputFile, output +"\n"); + Files.appendToFile(outputFile, output +"\n"); System.out.println(output); } else { System.out.println(output); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java 
2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -79,8 +79,8 @@ initLogger(); logger.info("Start"); Files.clearFile(file); - Files.appendFile(file, "neg Ex random: "+RANDOMNEGATIVES+"\n"); - Files.appendFile(file, "negfactor : "+NEGFACTOR+"\n"); + Files.appendToFile(file, "neg Ex random: "+RANDOMNEGATIVES+"\n"); + Files.appendToFile(file, "negfactor : "+NEGFACTOR+"\n"); //String fileURL = new File(ontologyFile).toURI().toString(); @@ -113,7 +113,7 @@ for (NamedClass target : classesToRelearn) { - Files.appendFile(file,"now learning: "+target+"\n"); + Files.appendToFile(file,"now learning: "+target+"\n"); waitForInput(); positiveEx.clear(); @@ -138,7 +138,7 @@ if(negativeEx.size()<0) { System.out.println(target); waitForInput(); - Files.appendFile(file, "\tSKIPPED negEX "+negativeEx+"\n"); + Files.appendToFile(file, "\tSKIPPED negEX "+negativeEx+"\n"); continue; } // reasoningService.prepareSubsumptionHierarchy(); @@ -151,7 +151,7 @@ e.printStackTrace(); } waitForInput(); - Files.appendFile(file, "*************\n"); + Files.appendToFile(file, "*************\n"); } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -237,7 +237,7 @@ public static void writeJamonLog(String filename){ File jamonlog = new File(filename); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); + Files.appendToFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); } This was sent by the SourceForge.net collaborative development platform, the 
world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-09 12:54:34
|
Revision: 3494 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3494&view=rev Author: lorenz_b Date: 2011-12-09 12:54:27 +0000 (Fri, 09 Dec 2011) Log Message: ----------- Started justification based coherent ontology extractor. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/utilities/CoherentOntologyExtractor.java trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java trunk/components-core/src/main/java/org/dllearner/utilities/MapUtils.java Added: trunk/components-core/src/main/java/org/dllearner/utilities/CoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/CoherentOntologyExtractor.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/CoherentOntologyExtractor.java 2011-12-09 12:54:27 UTC (rev 3494) @@ -0,0 +1,9 @@ +package org.dllearner.utilities; + +import org.semanticweb.owlapi.model.OWLOntology; + +public interface CoherentOntologyExtractor { + + OWLOntology getCoherentOntology(OWLOntology incoherentOntology); + +} Modified: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2011-12-09 09:22:49 UTC (rev 3493) +++ trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2011-12-09 12:54:27 UTC (rev 3494) @@ -1,62 +1,107 @@ package org.dllearner.utilities; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; 
import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import org.apache.commons.collections15.BidiMap; import org.apache.commons.collections15.bidimap.DualHashBidiMap; import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.semanticweb.owlapi.reasoner.OWLReasoner; import com.clarkparsia.modularity.IncrementalClassifier; public class GreedyCohaerencyExtractor { + private static final double STEP_SIZE = 0.001; + private static final int ALLOWED_UNSATISFIABLE_CLASSES = 5; + public GreedyCohaerencyExtractor() { // TODO Auto-generated constructor stub } public OWLOntology getCoharentOntology(OWLOntology ontology) throws OWLOntologyCreationException{ - IncrementalClassifier reasoner = new IncrementalClassifier(ontology); - reasoner.classify(); - BidiMap<AxiomType<? extends OWLAxiom>, Integer> axiomType2CountMap = getAxiomTypeCount(ontology); Map<AxiomType<? extends OWLAxiom>, List<OWLAxiom>> axiomType2AxiomsMap = new HashMap<AxiomType<? extends OWLAxiom>, List<OWLAxiom>>(); for(AxiomType<? extends OWLAxiom> type : AxiomType.AXIOM_TYPES){ axiomType2AxiomsMap.put(type, new ArrayList<OWLAxiom>(ontology.getAxioms(type))); } + System.out.println(ontology.getLogicalAxiomCount()); + double[] stepSize = new double[axiomType2CountMap.entrySet().size()]; + double[] cnt = new double[axiomType2CountMap.entrySet().size()]; + AxiomType[] type = new AxiomType[axiomType2CountMap.entrySet().size()]; + int i=0; + for(Entry<AxiomType<? 
extends OWLAxiom>, Integer> entry : axiomType2CountMap.entrySet()){ + stepSize[i] = STEP_SIZE * entry.getValue(); + type[i] = entry.getKey(); + cnt[i] = 0; + i++; + } - int lcm = lcm(new ArrayList<Integer>(axiomType2CountMap.values())); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); - man.addOntologyChangeListener(reasoner); OWLOntology cohaerentOntology = man.createOntology(); + IncrementalClassifier reasoner = new IncrementalClassifier(cohaerentOntology); + man.addOntologyChangeListener(reasoner); + reasoner.classify(); + + boolean isCohaerent = true; - for(int i = 0; i < lcm; i++){ + for(double j = 0; j < 1; j += STEP_SIZE){System.out.println(j); if(isCohaerent){ - for(Entry<AxiomType<? extends OWLAxiom>, Integer> entry : axiomType2CountMap.entrySet()){ - if((i % entry.getValue()) == 0){ - OWLAxiom ax = axiomType2AxiomsMap.get(entry.getKey()).remove(0); - man.addAxiom(cohaerentOntology, ax); - isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().isEmpty(); - if(!isCohaerent){ - man.removeAxiom(cohaerentOntology, ax); - break; - } + for(i = 0; i < stepSize.length; i++){ + cnt[i] = cnt[i] + stepSize[i]; + int x = (int)cnt[i]; + System.out.println("Adding " + x + " " + type[i] + " axioms from " + axiomType2CountMap.get(type[i])); +// System.out.println(axiomType2AxiomsMap.get(type[i]).size()); +// for(int k = 0; k < x; k++){ +// OWLAxiom ax = axiomType2AxiomsMap.get(type[i]).remove(0); +// man.addAxiom(cohaerentOntology, ax); +// isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().isEmpty(); +// if(!isCohaerent){ +// man.removeAxiom(cohaerentOntology, ax); +// break; +// } +// } + Set<OWLAxiom> toAdd = new HashSet<OWLAxiom>(axiomType2AxiomsMap.get(type[i]).subList(0, x)); + man.addAxioms(cohaerentOntology, toAdd); + axiomType2AxiomsMap.get(type[i]).removeAll(toAdd); + isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size() <= ALLOWED_UNSATISFIABLE_CLASSES; + if(!isCohaerent){ + 
man.removeAxioms(cohaerentOntology, toAdd);System.out.println("Incohaerency detected"); + break; } + cnt[i] = cnt[i] - x; } } + System.out.println(cohaerentOntology.getLogicalAxiomCount()); + } + try { + man.saveOntology(cohaerentOntology, new RDFXMLOntologyFormat(), new BufferedOutputStream(new FileOutputStream(new File("coherent.owl")))); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } return cohaerentOntology; } @@ -68,47 +113,22 @@ BidiMap<AxiomType<? extends OWLAxiom>, Integer> axiomType2CountMap = new DualHashBidiMap<AxiomType<? extends OWLAxiom>, Integer>(); for(AxiomType<? extends OWLAxiom> type : AxiomType.AXIOM_TYPES){ - axiomType2CountMap.put(type, ontology.getAxiomCount(type)); + int cnt = ontology.getAxiomCount(type); + if(cnt > 0){ + axiomType2CountMap.put(type, Integer.valueOf(cnt)); + } + Set<? extends OWLAxiom> axioms = ontology.getAxioms(type); } return axiomType2CountMap; } - private int lcm(int x1,int x2) { - if(x1<=0 || x2<=0) { - throw new IllegalArgumentException("Cannot compute the least "+ - "common multiple of two "+ - "numbers if one, at least,"+ - "is negative."); - } - int max,min; - if (x1>x2) { - max = x1; - min = x2; - } else { - max = x2; - min = x1; - } - for(int i=1; i<=min; i++) { - if( (max*i)%min == 0 ) { - return i*max; - } - } - throw new Error("Cannot find the least common multiple of numbers "+ - x1+" and "+x2); - } - - private int lcm(List<Integer> values) { - if(values.size() == 1){ - return values.get(0); - } else { - List<Integer> list = new ArrayList<Integer>(); - list.add(lcm(values.get(0), values.get(1))); - if(values.size() > 2){ - list.addAll(values.subList(2, values.size())); - } - return lcm(list); - } + public static void main(String[] args) throws Exception{ + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLOntology schema = man.loadOntologyFromOntologyDocument(new 
File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); + + GreedyCohaerencyExtractor ge = new GreedyCohaerencyExtractor(); + ge.getCoharentOntology(schema); } } Added: trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JustificationBasedCoherentOntologyExtractor.java 2011-12-09 12:54:27 UTC (rev 3494) @@ -0,0 +1,139 @@ +package org.dllearner.utilities; + +import java.io.File; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.mindswap.pellet.RBox; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLLogicalAxiom; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.RemoveAxiom; + +import uk.ac.manchester.cs.owlapi.modularity.ModuleType; + +import com.clarkparsia.modularity.IncrementalClassifier; +import com.clarkparsia.modularity.ModularityUtils; +import com.clarkparsia.owlapi.explanation.PelletExplanation; +import com.clarkparsia.owlapiv3.OntologyUtils; + +public class JustificationBasedCoherentOntologyExtractor implements CoherentOntologyExtractor{ + + private static final int NUMBER_OF_JUSTIFICATIONS = 1; +// private PelletReasoner reasoner; + private IncrementalClassifier reasoner; + + private 
OWLOntology incoherentOntology; + private OWLOntology ontology; + + static {PelletExplanation.setup();} + + @Override + public OWLOntology getCoherentOntology(OWLOntology ontology) { + this.ontology = ontology; + this.incoherentOntology = getOntologyWithoutAnnotations(ontology); + +// reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(incoherentOntology); +// reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); + reasoner = new IncrementalClassifier(incoherentOntology); + reasoner.classify(); + + OWLOntologyManager man = incoherentOntology.getOWLOntologyManager(); +// man.addOntologyChangeListener(reasoner); + + Set<OWLClass> unsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + + //if the ontology is not incoherent we return it here + if(unsatClasses.isEmpty()){ + return incoherentOntology; + } + + while(!unsatClasses.isEmpty()){ + //for each unsatisfiable class we compute n justifications here and count how often each axiom occurs globally + Map<OWLAxiom, Integer> axiom2CountMap = new HashMap<OWLAxiom, Integer>(); + for(OWLClass unsatClass : unsatClasses){ + Set<Set<OWLAxiom>> explanations = computeExplanations(unsatClass); + for(Set<OWLAxiom> explanation : explanations){ + for(OWLAxiom ax : explanation){ + Integer cnt = axiom2CountMap.get(ax); + if(cnt == null){ + cnt = 0; + } + cnt = cnt + 1; + axiom2CountMap.put(ax, cnt); + } + } + } + //get a sorted list of entries with the highest axiom count first + List<Entry<OWLAxiom, Integer>> sortedEntries = MapUtils.sortByValues(axiom2CountMap); + for(Entry<OWLAxiom, Integer> entry : sortedEntries){ + System.out.println(entry.getKey() + ":" + entry.getValue()); + } + //we remove the most occuring axiom + OWLAxiom toRemove = sortedEntries.get(0).getKey(); + man.removeAxiom(incoherentOntology, toRemove); + man.applyChange(new RemoveAxiom(incoherentOntology, toRemove)); + reasoner.classify(); + unsatClasses = 
reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); + } + System.out.println(incoherentOntology.getLogicalAxiomCount()); + + return getOntologyWithAnnotations(incoherentOntology); + } + + private OWLOntology getOntologyWithoutAnnotations(OWLOntology ontology){ + try { + OWLOntologyManager man = ontology.getOWLOntologyManager(); + OWLOntology ontologyWithoutAnnotations = ontology.getOWLOntologyManager().createOntology(); + for(OWLAxiom ax : ontology.getLogicalAxioms()){ + man.addAxiom(ontologyWithoutAnnotations, ax.getAxiomWithoutAnnotations()); + } + return ontologyWithoutAnnotations; + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + return null; + } + + private OWLOntology getOntologyWithAnnotations(OWLOntology ontologyWithOutAnnotations){ + OWLOntologyManager man = ontology.getOWLOntologyManager(); + for (Iterator<OWLLogicalAxiom> iterator = ontology.getLogicalAxioms().iterator(); iterator.hasNext();) { + OWLLogicalAxiom axiom = iterator.next(); + if(!ontologyWithOutAnnotations.containsAxiomIgnoreAnnotations(axiom)){ + man.removeAxiom(ontology, axiom); + } + } + return ontology; + } + + private Set<Set<OWLAxiom>> computeExplanations(OWLClass unsatClass){ + OWLOntology module = OntologyUtils.getOntologyFromAxioms( + ModularityUtils.extractModule(incoherentOntology, Collections.singleton((OWLEntity)unsatClass), ModuleType.TOP_OF_BOT)); + PelletExplanation expGen = new PelletExplanation(module); + return expGen.getUnsatisfiableExplanations(unsatClass, NUMBER_OF_JUSTIFICATIONS); + } + + public static void main(String[] args) throws Exception{ + Logger.getLogger(RBox.class.getName()).setLevel(Level.OFF); + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLOntology schema = man.loadOntologyFromOntologyDocument(new File("../components-core/cohaerent.owl")); +// OWLOntology schema = man.loadOntologyFromOntologyDocument(new 
File("/home/lorenz/arbeit/dbpedia_0.75_no_datapropaxioms.owl")); + + JustificationBasedCoherentOntologyExtractor extractor = new JustificationBasedCoherentOntologyExtractor(); + OWLOntology coherentOntology = extractor.getCoherentOntology(schema);System.out.println(coherentOntology.getLogicalAxiomCount()); + } + +} Added: trunk/components-core/src/main/java/org/dllearner/utilities/MapUtils.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/MapUtils.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/MapUtils.java 2011-12-09 12:54:27 UTC (rev 3494) @@ -0,0 +1,41 @@ +package org.dllearner.utilities; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +public class MapUtils { + + /** + * Returns a list of entries sorted by the values descending. + * @param map + * @return + */ + public static <K, V extends Comparable<V>> List<Entry<K, V>> sortByValues(Map<K, V> map){ + return sortByValues(map, false); + } + + /** + * Returns a list of entries sorted by the values either ascending or descending. + * @param map + * @return + */ + public static <K, V extends Comparable<V>> List<Entry<K, V>> sortByValues(Map<K, V> map, final boolean ascending){ + List<Entry<K, V>> entries = new ArrayList<Entry<K, V>>(map.entrySet()); + Collections.sort(entries, new Comparator<Entry<K, V>>() { + + @Override + public int compare(Entry<K, V> o1, Entry<K, V> o2) { + if(ascending){ + return o1.getValue().compareTo(o2.getValue()); + } else { + return o2.getValue().compareTo(o1.getValue()); + } + } + }); + return entries; + } +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-12-09 09:23:00
|
Revision: 3493 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3493&view=rev Author: jenslehmann Date: 2011-12-09 09:22:49 +0000 (Fri, 09 Dec 2011) Log Message: ----------- basic LOD enrichment script done Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-08 16:59:38 UTC (rev 3492) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-09 09:22:49 UTC (rev 3493) @@ -155,7 +155,7 @@ public class Enrichment { // data structure for holding the result of an algorithm run - private class AlgorithmRun { + protected class AlgorithmRun { // we only store the algorithm class and not the learning algorithm object, // since otherwise we run into memory problems for full enrichment @@ -521,7 +521,7 @@ /* * Generates list of OWL axioms. */ - private List<OWLAxiom> toRDF(List<EvaluatedAxiom> evalAxioms, Class<? extends LearningAlgorithm> algorithm, Map<ConfigOption,Object> parameters, SparqlEndpointKS ks){ + List<OWLAxiom> toRDF(List<EvaluatedAxiom> evalAxioms, Class<? 
extends LearningAlgorithm> algorithm, Map<ConfigOption,Object> parameters, SparqlEndpointKS ks){ return toRDF(evalAxioms, algorithm, parameters, ks, null); } @@ -653,7 +653,7 @@ // return model; // } - private Model getModel(List<OWLAxiom> axioms) { + Model getModel(List<OWLAxiom> axioms) { Model model = ModelFactory.createDefaultModel(); try { OWLOntology ontology = OWLManager.createOWLOntologyManager().createOntology(new HashSet<OWLAxiom>(axioms)); Modified: trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-08 16:59:38 UTC (rev 3492) +++ trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-09 09:22:49 UTC (rev 3493) @@ -19,16 +19,33 @@ */ package org.dllearner.cli; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.URL; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.SimpleLayout; +import org.dllearner.cli.Enrichment.AlgorithmRun; +import org.dllearner.core.ComponentInitException; +import org.dllearner.core.LearningProblemUnsupportedException; +import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.kb.sparql.SparqlQuery; +import org.semanticweb.owlapi.model.OWLAxiom; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.rdf.model.Model; /** * Enriches all of the LOD cloud. 
@@ -38,36 +55,89 @@ */ public class GlobalEnrichment { + // parameters + private static double threshold = 0.8; + private static int nrOfAxiomsToLearn = 10; + private static boolean useInference = true; + + // directory for generated schemata + private static String baseDir = "log/lod-enriched/"; + /** * @param args * @throws MalformedURLException + * @throws LearningProblemUnsupportedException + * @throws NoSuchMethodException + * @throws InvocationTargetException + * @throws IllegalAccessException + * @throws InstantiationException + * @throws ComponentInitException + * @throws SecurityException + * @throws IllegalArgumentException + * @throws FileNotFoundException */ - public static void main(String[] args) throws MalformedURLException { - // get all SPARQL endpoints and their graphs - List<SparqlEndpoint> endpoints = new LinkedList<SparqlEndpoint>(); + public static void main(String[] args) throws MalformedURLException, IllegalArgumentException, SecurityException, ComponentInitException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, LearningProblemUnsupportedException, FileNotFoundException { + SimpleLayout layout = new SimpleLayout(); + ConsoleAppender consoleAppender = new ConsoleAppender(layout); + Logger.getRootLogger().setLevel(Level.WARN); + Logger.getLogger("org.dllearner").setLevel(Level.WARN); // seems to be needed for some reason (?) + Logger.getRootLogger().removeAllAppenders(); + Logger.getRootLogger().addAppender(consoleAppender); + + // get all SPARQL endpoints and their graphs - the key is a name-identifier + Map<String,SparqlEndpoint> endpoints = new HashMap<String,SparqlEndpoint>(); + String query = ""; - query += "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "; - query += "PREFIX void: <http://rdfs.org/ns/void#> "; - query += "PREFIX dcterms: <http://purl.org/dc/terms/> "; - query += "SELECT ?endpoint "; - query += "WHERE { "; - query += "?item rdf:type void:Dataset . 
"; - query += "?item dcterms:isPartOf <http://ckan.net/group/lodcloud> . "; - query += "?item void:sparqlEndpoint ?endpoint . "; - query += "}"; + query += "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \n"; + query += "PREFIX void: <http://rdfs.org/ns/void#> \n"; + query += "PREFIX dcterms: <http://purl.org/dc/terms/> \n"; + query += "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> \n"; + query += "PREFIX ov: <http://open.vocab.org/terms/> \n"; + query += "SELECT * \n"; + query += "WHERE { \n"; + query += " ?item rdf:type void:Dataset . \n"; + query += " ?item dcterms:isPartOf <http://ckan.net/group/lodcloud> . \n"; + query += " ?item void:sparqlEndpoint ?endpoint . \n"; +// query += " ?item dcterms:subject ?subject . \n"; +// query += " ?item rdfs:label ?label . \n"; + query += " ?item ov:shortName ?shortName . \n"; + query += "}"; // query += "LIMIT 20"; + System.out.println("Getting list of SPARQL endpoints from LATC DSI:"); + System.out.println(query); - // LATC DSI/MDS + // contact LATC DSI/MDS SparqlEndpoint dsi = new SparqlEndpoint(new URL("http://api.talis.com/stores/latc-mds/services/sparql")); SparqlQuery sq = new SparqlQuery(query, dsi); ResultSet rs = sq.send(); while(rs.hasNext()) { QuerySolution qs = rs.next(); String endpoint = qs.get("endpoint").toString(); -// String graph = qs.getLiteral("graph").getString(); - System.out.println(endpoint); + String shortName = qs.get("shortName").toString(); + endpoints.put(shortName, new SparqlEndpoint(new URL(endpoint))); } + System.out.println(endpoints.size() + " endpoints detected."); + + // perform enrichment on endpoints + for(Entry<String,SparqlEndpoint> endpoint : endpoints.entrySet()) { + // run enrichment + SparqlEndpoint se = endpoint.getValue(); + String name = endpoint.getKey(); + System.out.println("Enriching " + name + " using " + se); + Enrichment e = new Enrichment(se, null, threshold, nrOfAxiomsToLearn, useInference, false); + e.start(); + // save results to a file + 
SparqlEndpointKS ks = new SparqlEndpointKS(se); + List<AlgorithmRun> runs = e.getAlgorithmRuns(); + List<OWLAxiom> axioms = new LinkedList<OWLAxiom>(); + for(AlgorithmRun run : runs) { + axioms.addAll(e.toRDF(run.getAxioms(), run.getAlgorithm(), run.getParameters(), ks)); + } + Model model = e.getModel(axioms); + File f = new File(baseDir + name + ".ttl"); + model.write(new FileOutputStream(f), "TURTLE"); + } } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-12-08 16:59:47
|
Revision: 3492 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3492&view=rev Author: jenslehmann Date: 2011-12-08 16:59:38 +0000 (Thu, 08 Dec 2011) Log Message: ----------- script to get SPARQL endpoints in LOD cloud using LATC MDS Added Paths: ----------- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java Added: trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java (rev 0) +++ trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-08 16:59:38 UTC (rev 3492) @@ -0,0 +1,73 @@ +/** + * Copyright (C) 2007-2011, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.cli; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.LinkedList; +import java.util.List; + +import org.dllearner.kb.sparql.SparqlEndpoint; +import org.dllearner.kb.sparql.SparqlQuery; + +import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.ResultSet; + +/** + * Enriches all of the LOD cloud. 
+ * + * @author Jens Lehmann + * + */ +public class GlobalEnrichment { + + /** + * @param args + * @throws MalformedURLException + */ + public static void main(String[] args) throws MalformedURLException { + // get all SPARQL endpoints and their graphs + List<SparqlEndpoint> endpoints = new LinkedList<SparqlEndpoint>(); + + String query = ""; + query += "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "; + query += "PREFIX void: <http://rdfs.org/ns/void#> "; + query += "PREFIX dcterms: <http://purl.org/dc/terms/> "; + query += "SELECT ?endpoint "; + query += "WHERE { "; + query += "?item rdf:type void:Dataset . "; + query += "?item dcterms:isPartOf <http://ckan.net/group/lodcloud> . "; + query += "?item void:sparqlEndpoint ?endpoint . "; + query += "}"; +// query += "LIMIT 20"; + + // LATC DSI/MDS + SparqlEndpoint dsi = new SparqlEndpoint(new URL("http://api.talis.com/stores/latc-mds/services/sparql")); + SparqlQuery sq = new SparqlQuery(query, dsi); + ResultSet rs = sq.send(); + while(rs.hasNext()) { + QuerySolution qs = rs.next(); + String endpoint = qs.get("endpoint").toString(); +// String graph = qs.getLiteral("graph").getString(); + System.out.println(endpoint); + } + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-08 13:05:50
|
Revision: 3491 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3491&view=rev Author: sebastianwtr Date: 2011-12-08 13:05:41 +0000 (Thu, 08 Dec 2011) Log Message: ----------- [tbsl exploration] fixed Levenshtein distance Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-08 10:43:20 UTC (rev 3490) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-08 13:05:41 UTC (rev 3491) @@ -99,6 +99,7 @@ result=result.replace(" ", ""); result=result.replaceFirst("<td>", ""); + String[] tmp_array=result.split("</td><td>"); for(int i =1; i<=tmp_array.length-2;i=i+2) { Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java 2011-12-08 10:43:20 UTC (rev 3490) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Levenshtein.java 2011-12-08 13:05:41 UTC (rev 3491) @@ -7,59 +7,24 @@ public class Levenshtein { - //http://de.wikipedia.org/wiki/Levenshtein-Distanz - public double nld(String orig, String eing){ - //int result = diff(orig,eing); - int result = computeLevenshteinDistance(orig,eing); - int length=Math.max(orig.length(),eing.length()); + public static 
double nld(String orig, String eing){ + double result = computeLevenshteinDistance(orig,eing); + //System.out + double length=Math.max(orig.length(),eing.length()); - //if distance between both is zero, then the NLD must be one - if(result==0 ){ + if(result==0.0 ){ return 1; } else{ - BigDecimal m = new BigDecimal(result); - BigDecimal n = new BigDecimal(length); - BigDecimal c = new BigDecimal(0); - c=m.divide(n, 5, BigDecimal.ROUND_FLOOR); - - return c.doubleValue(); + double result_nld =result/length; + return result_nld; } } - public int diff(String orig, String eing) { - - int matrix[][] = new int[orig.length() + 1][eing.length() + 1]; - for (int i = 0; i < orig.length() + 1; i++) { - matrix[i][0] = i; - } - for (int i = 0; i < eing.length() + 1; i++) { - matrix[0][i] = i; - } - for (int a = 1; a < orig.length() + 1; a++) { - for (int b = 1; b < eing.length() + 1; b++) { - int right = 0; - if (orig.charAt(a - 1) != eing.charAt(b - 1)) { - right = 1; - } - int mini = matrix[a - 1][b] + 1; - if (matrix[a][b - 1] + 1 < mini) { - mini = matrix[a][b - 1] + 1; - } - if (matrix[a - 1][b - 1] + right < mini) { - mini = matrix[a - 1][b - 1] + right; - } - matrix[a][b] = mini; - } - } - - return matrix[orig.length()][eing.length()]; - } - //http://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance#Java private static int minimum(int a, int b, int c) { return Math.min(Math.min(a, b), c); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-08 10:43:20 UTC (rev 3490) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-08 13:05:41 UTC (rev 3491) @@ -61,13 +61,13 @@ //Konstruktor public SparqlObject() throws MalformedURLException, ClassNotFoundException, 
SQLException{ - this.wordnet = new WordNet(); + wordnet = new WordNet(); System.out.println("Loading SPARQL Templator"); - this.btemplator = new BasicTemplator(); - this.templator = new Templator(); + btemplator = new BasicTemplator(); + templator = new Templator(); System.out.println("Loading SPARQL Templator Done\n"); System.out.println("Start Indexing"); - this.myindex = new mySQLDictionary(); + myindex = new mySQLDictionary(); System.out.println("Done:Indexing"); setExplorationdepthwordnet(1); @@ -132,8 +132,7 @@ if(lstquery.isEmpty()){ saveNotParsedQuestions(question); } - //for each querry - //TODO: Add function that no qery is send to the server, if querylist==null + for(ArrayList<String> querylist : lstquery){ boolean startIterating=true; @@ -271,7 +270,6 @@ try { in.close(); } catch (IOException e) { - // TODO Auto-generated catch block e.printStackTrace(); } } @@ -279,7 +277,7 @@ String out=""; for(String answer : final_answer){ //only answered question - // if(!answer.contains("Error in searching Wordnet with word") && !answer.contains("EmtyAnswer")&& !answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; + if(!answer.contains("Error in searching Wordnet with word") && !answer.contains("EmtyAnswer")&& !answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; /* //only questions with wordnet error @@ -289,7 +287,7 @@ if(answer.contains("EmtyAnswer"))out=out+ "\n"+answer+"\n"; */ //only questions with Error in Properties - if(answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; + // if(answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; @@ -367,11 +365,13 @@ String key = entry.getKey(); String value = entry.getValue(); double tmp=Levenshtein.computeLevenshteinDistance(property_to_compare_with.toLowerCase(), key); + double nld=Levenshtein.nld(property_to_compare_with.toLowerCase(), key); /* * TODO: Implement Normalised levensthein */ - if(tmp<=3.0){ + //if(tmp<=3.0){ + 
if(nld>=LvenstheinMin){ //alte property uri mit neuer ersetzen: String query_tmp=query; String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); @@ -433,11 +433,11 @@ } if(tmpcounter>4){ if(s.contains("LEFT")){ - sideOfPropertyOne="LEFT"; + sideOfPropertyTwo="LEFT"; resourceTwo=s.replace("LEFT",""); } if(s.contains("RIGHT")){ - sideOfPropertyOne="RIGHT"; + sideOfPropertyTwo="RIGHT"; resourceTwo=s.replace("RIGHT",""); } if(s.contains("PROPERTY")){ @@ -451,10 +451,9 @@ GetRessourcePropertys property = new GetRessourcePropertys(); Boolean goOnAfterProperty = true; try { - /* - * TODO: Have to check now, if we need a right Property or a left one - */ + propertiesOne=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyOne); + propertiesTwo=property.getPropertys(getUriFromIndex(resourceTwo.toLowerCase(),0),sideOfPropertyTwo); if (propertiesOne==null){ //final_answer.add("Error in getting Properties\n"); @@ -463,34 +462,14 @@ } //System.out.println(properties); } catch (IOException e) { - // TODO Auto-generated catch block //e.printStackTrace(); final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); goOnAfterProperty=false; } - try { - /* - * TODO: Have to check now, if we need a right Property or a left one - */ - propertiesTwo=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyTwo); - if (propertiesOne==null){ - //final_answer.add("Error in getting Properties\n"); - - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - } - //System.out.println(properties); - } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - - } + if(goOnAfterProperty==true){ //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue 
query erzeugen. ArrayList<String> new_queries= new ArrayList<String>(); @@ -500,8 +479,15 @@ String queryOne=query; String keyOne = entryOne.getKey(); String valueOne = entryOne.getValue(); - double levnstheinDistanzeOne=Levenshtein.computeLevenshteinDistance(property_to_compare_withOne.toLowerCase(), keyOne); - if(levnstheinDistanzeOne<=3.0){ + //double levnstheinDistanzeOne=Levenshtein.computeLevenshteinDistance(property_to_compare_withOne.toLowerCase(), keyOne); + //if(levnstheinDistanzeOne<=3.0){ + double levnstheinDistanzeOne=Levenshtein.nld(property_to_compare_withOne.toLowerCase(), keyOne); + + /* + * TODO: Implement Normalised levensthein + */ + //if(tmp<=3.0){ + if(levnstheinDistanzeOne>=LvenstheinMin){ String test = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); queryOne=queryOne.replace(test,valueOne); } @@ -509,12 +495,19 @@ for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { String keyTwo = entryTwo.getKey(); String valueTwo = entryTwo.getValue(); - double levnstheinDistanzeTwo=Levenshtein.computeLevenshteinDistance(property_to_compare_withTwo.toLowerCase(), keyTwo); + // double levnstheinDistanzeTwo=Levenshtein.computeLevenshteinDistance(property_to_compare_withTwo.toLowerCase(), keyTwo); /* * TODO: Implement Normalised levensthein */ - if(levnstheinDistanzeTwo<=3.0){ + //if(levnstheinDistanzeTwo<=3.0){ + double levnstheinDistanzeTwo=Levenshtein.nld(property_to_compare_withTwo.toLowerCase(), keyTwo); + + /* + * TODO: Implement Normalised levensthein + */ + //if(tmp<=3.0){ + if(levnstheinDistanzeTwo>=0.9){ //alte property uri mit neuer ersetzen: String queryTwo=queryOne; String test = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); @@ -583,9 +576,6 @@ GetRessourcePropertys property = new GetRessourcePropertys(); Boolean goOnAfterProperty = true; try { - /* - * TODO: Have to check now, if we need a right Property or a left one - */ 
properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); if (properties==null){ //final_answer.add("Error in getting Properties\n"); @@ -595,7 +585,6 @@ } //System.out.println(properties); } catch (IOException e) { - // TODO Auto-generated catch block //e.printStackTrace(); final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); @@ -725,11 +714,11 @@ } if(tmpcounter>4){ if(s.contains("LEFT")){ - sideOfPropertyOne="LEFT"; + sideOfPropertyTwo="LEFT"; resourceTwo=s.replace("LEFT",""); } if(s.contains("RIGHT")){ - sideOfPropertyOne="RIGHT"; + sideOfPropertyTwo="RIGHT"; resourceTwo=s.replace("RIGHT",""); } if(s.contains("PROPERTY")){ @@ -740,15 +729,11 @@ } System.out.println("Property to compare:: "+ property_to_compare_withOne); System.out.println("Resource: "+ resourceOne); - //contains uri AND string, every second is the string HashMap<String,String> propertiesOne = new HashMap<String, String>(); HashMap<String,String> propertiesTwo = new HashMap<String, String>(); GetRessourcePropertys property = new GetRessourcePropertys(); Boolean goOnAfterProperty = true; try { - /* - * TODO: Have to check now, if we need a right Property or a left one - */ propertiesOne=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyOne); propertiesTwo=property.getPropertys(getUriFromIndex(resourceTwo.toLowerCase(),0),sideOfPropertyTwo); if (propertiesOne==null){ @@ -774,7 +759,7 @@ //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. 
ArrayList<String> new_queries= new ArrayList<String>(); - System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); + //System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); ArrayList<String> semanticsOne=new ArrayList<String>(); ArrayList<String> tmp_semanticsOne=new ArrayList<String>(); ArrayList<String> result_SemanticsMatchPropertiesOne=new ArrayList<String>(); @@ -1335,12 +1320,7 @@ result=result.concat(str); counter=counter+1;} } while (str != null); - - //TODO:if counter = 5 or less, there is an empty answer from the Server! Still to Verify! - /* if(counter<=5){ - System.out.println("Empty Answer from Server"); - return "noanswer"; - }*/ + } catch (MalformedURLException e) { System.out.println("Must enter a valid URL"); } catch (IOException e) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-08 10:43:31
|
Revision: 3490 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3490&view=rev Author: sebastianwtr Date: 2011-12-08 10:43:20 +0000 (Thu, 08 Dec 2011) Log Message: ----------- [tbsl exploration] removed nullpointer error Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-08 09:01:12 UTC (rev 3489) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-08 10:43:20 UTC (rev 3490) @@ -47,12 +47,8 @@ */ String vergleichorig = vergleich; - /*String bla123 = vergleich; - //to get only the name - bla123=bla123.replace("http://dbpedia.org/resource/Category:",""); - bla123=bla123.replace("http://dbpedia.org/resource/",""); - vergleich=bla123;*/ + String tmp_left="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {?y ?p <"+vergleichorig+">. ?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; //System.out.println("property right!!! : " +tmp_right); String tmp_right="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {<"+vergleichorig+"> ?p ?y. 
?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-08 09:01:12 UTC (rev 3489) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-08 10:43:20 UTC (rev 3490) @@ -925,176 +925,6 @@ } - /** - * Is the function for the Case, you are in Iteration one and have only one triple of condition (s,p,o). - * @param querylist - * @param query - * @return a list with answers from the Server - * @throws SQLException - * @throws JWNLException - */ - private ArrayList<String> simpleIteration1Case(ArrayList<String> querylist, String query) throws SQLException, - JWNLException { - //asking server - String answer; - ArrayList<String> final_answer=new ArrayList<String>(); - - /* - * First try the original query on the server. 
If that doesnt work, try it with Iteration - */ - answer=sendServerQuestionRequest(query); - - if(answer.contains("EmtyAnswer")){ - - String resource=""; - String property_to_compare_with=""; - String sideOfProperty="LEFT"; - - - int tmpcounter=0; - for(String s : querylist){ - //we dont need the first one, because thats the query itself - tmpcounter=tmpcounter+1; - if(tmpcounter>=1){ - if(s.contains("LEFT")){ - sideOfProperty="LEFT"; - resource=s.replace("LEFT",""); - } - if(s.contains("RIGHT")){ - sideOfProperty="RIGHT"; - resource=s.replace("RIGHT",""); - } - if(s.contains("PROPERTY")){ - property_to_compare_with=s.replace("PROPERTY",""); - } - - } - } - System.out.println("Property to compare:: "+ property_to_compare_with); - System.out.println("Resource: "+ resource); - //contains uri AND string, every second is the string - HashMap<String,String> properties = new HashMap<String, String>(); - GetRessourcePropertys property = new GetRessourcePropertys(); - Boolean goOnAfterProperty = true; - try { - /* - * TODO: Have to check now, if we need a right Property or a left one - */ - properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); - if (properties==null){ - //final_answer.add("Error in getting Properties\n"); - - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - } - //System.out.println(properties); - } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - - final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - - } - if(goOnAfterProperty==true){ - //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. 
- ArrayList<String> new_queries= new ArrayList<String>(); - for (Entry<String, String> entry : properties.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - double tmp=Levenshtein.computeLevenshteinDistance(property_to_compare_with.toLowerCase(), key); - - /* - * TODO: Implement Normalised levensthein - */ - if(tmp<=3.0){ - //alte property uri mit neuer ersetzen: - String query_tmp=query; - String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); - //query_tmp=query_tmp.replace(test,properties.get(i-1)); - query_tmp=query_tmp.replace(test,value); - new_queries.add(query_tmp); - } - - } - - System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); - ArrayList<String> semantics=new ArrayList<String>(); - ArrayList<String> tmp_semantics=new ArrayList<String>(); - ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); - semantics.add(property_to_compare_with); - - //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child - String _temp_=myindex.getWordnetHelp(property_to_compare_with); - if(_temp_==null){ - tmp_semantics=semantics; - } - else{ - semantics.clear(); - semantics.add(_temp_); - tmp_semantics=semantics; - } - Boolean goOnAfterWordnet = true; - for(int i=0;i<=explorationdepthwordnet;i++){ - - try { - tmp_semantics=getSemantics(tmp_semantics); - if (tmp_semantics==null){ - goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); - - } - else{ - //each word only one time - for(String k : tmp_semantics){ - if(!semantics.contains(k)) semantics.add(k); - } - } - - } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - goOnAfterWordnet=false; - final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); - - } - - } - - if(goOnAfterWordnet==true){ - - for (Entry<String, String> entry : properties.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - - for(String b : semantics){ - if(key.contains(b.toLowerCase())){ - if(!result_SemanticsMatchProperties.contains(key)){ - //create new query - result_SemanticsMatchProperties.add(key); - String query_tmp=query; - String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); - query_tmp=query_tmp.replace(test,value); - System.out.println("New query after wordnet: "+ query_tmp); - new_queries.add(query_tmp); - } - } - } - } - - for(String bla : new_queries){ - String answer_tmp; - answer_tmp=sendServerQuestionRequest(bla); - System.out.println("Antwort vom Server: "+answer_tmp); - final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); - } - } - } - } - - return final_answer; - } - @@ -1102,6 +932,7 @@ + /** * Iterates thru the conditions and returns an array, where one can see, if the Property is left or right from the resource @@ -1167,109 +998,136 @@ ArrayList<String> lstquerupsidedown = new ArrayList<String>(); String query; 
String selTerms =""; - for(SPARQL_Term terms :temp.getSelTerms()) selTerms=selTerms+(terms.toString())+" "; - System.out.println(selTerms); + boolean addQuery=true; + //sometimes there isnt an Selectterm, so dont use this query + try{ + for(SPARQL_Term terms :temp.getSelTerms()) selTerms=selTerms+(terms.toString())+" "; + } + catch (Exception e){ + selTerms=""; + addQuery=false; + } + + String conditions = ""; - for(Path condition: temp.getConditions()) conditions=conditions+(condition.toString())+"."; + try{ + for(Path condition: temp.getConditions()) conditions=conditions+(condition.toString())+"."; + } + catch (Exception e){ + conditions=""; + addQuery=false; + } String filters=""; - for(SPARQL_Filter tmp : temp.getFilters()) filters=filters+tmp+" "; - //System.out.println("\n"); - System.out.println("\n"); - query="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions.replace("--","") + filters+"}"; - - String conditions_new = ""; - for(Path condition: temp.getConditions()){ - //make conditions up-side-down - String[] tmp_upside = condition.toString().split(" -- "); - String tmp_conditions_new=""; - for(String con : tmp_upside) tmp_conditions_new = con +" "+tmp_conditions_new; - //remove all dots befor end - tmp_conditions_new=tmp_conditions_new.replace(".", ""); - //at the end ein . 
- tmp_conditions_new = tmp_conditions_new + "."; + try{ + for(SPARQL_Filter tmp : temp.getFilters()) filters=filters+tmp+" "; + } + catch(Exception e){ + filters=""; + addQuery=false; + } + if(addQuery==true){ + query="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions.replace("--","") + filters+"}"; + + String conditions_new = ""; + for(Path condition: temp.getConditions()){ + //make conditions up-side-down + String[] tmp_upside = condition.toString().split(" -- "); + String tmp_conditions_new=""; + for(String con : tmp_upside) tmp_conditions_new = con +" "+tmp_conditions_new; + //remove all dots befor end + tmp_conditions_new=tmp_conditions_new.replace(".", ""); + //at the end ein . + tmp_conditions_new = tmp_conditions_new + "."; + + //conditions_new=tmp_conditions_new; + + conditions_new=conditions_new + tmp_conditions_new; + } - //conditions_new=tmp_conditions_new; + - conditions_new=conditions_new + tmp_conditions_new; + /*System.out.println("Conditions: " + conditions); + System.out.println("Conditions_new: " + conditions_new);*/ + + + String query_upside_down = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions_new.replace("--","") +filters+ "}"; + String[] slots= null; + int slotcounter=1; + for(Slot slot : temp.getSlots()){ + + //see below + slotcounter=slotcounter+1; + + + String slotstring=slot.toString().replace("UNSPEC","").replace("RESOURCE","").replace("{","").replace("}",""); + slotstring=slotstring.replace(" ",""); + //System.out.println(tmp); + //damit auch wirklich nur ?y und nicht ?y0 ersetzt wird, einfach nach "?y " suchen. 
+ String[] array = slotstring.split(":"); + String replace; + if(array[0].length()<2)replace = "?"+array[0]+" "; + else replace="?"+array[0]; + + + //TODO: Hotfix: get rid of " PROPERTY " + String _ThingGettingURIfor_=array[1]; + _ThingGettingURIfor_=_ThingGettingURIfor_.replace(" PROPERTY ","").toLowerCase(); + String hm_result=getUriFromIndex(_ThingGettingURIfor_,0); + try + { + if(hm_result.contains("Category:")) hm_result=hm_result.replace("Category:",""); + } + catch ( Exception e ) + { + + } + + /*always the middle slot is the property + * so count and always take the second of third to become a property + */ + if(slotcounter%2==0){ + hm_result=getUriFromIndex(_ThingGettingURIfor_,1); + } + //set back to 0 to start new + if(slotcounter==3) slotcounter=0; + query=query.replace(replace, "<"+hm_result+">"); + query_upside_down=query_upside_down.replace(replace, "<"+hm_result+">"); + + } + + lstquerupsidedown.add(query_upside_down); + lstquerynew.add(query); + + + + ArrayList<String> lsttmp=createLeftAndRightPropertyArray(query); + //if its lower than three, we dont have any conditions and dont need to check it. 
+ //also if the size%3 isnt 0, than something else is wrong and we dont need to test the query + if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerynew.add(i); + else{ + lstquerynew.clear(); + lstquerynew.add("ERROR"); + addQuery=false; + } + + lsttmp.clear(); + lsttmp=createLeftAndRightPropertyArray(query_upside_down); + if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerupsidedown.add(i); + else{ + lstquerupsidedown.clear(); + lstquerupsidedown.add("ERROR"); + addQuery=false; + } + + if(addQuery==true){ + lstquery.add(lstquerynew); + lstquery.add(lstquerupsidedown); + } } + } - - - System.out.println("Conditions: " + conditions); - System.out.println("Conditions_new: " + conditions_new); - - - String query_upside_down = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions_new.replace("--","") +filters+ "}"; - String[] slots= null; - int slotcounter=1; - for(Slot slot : temp.getSlots()){ - - //see below - slotcounter=slotcounter+1; - - - String slotstring=slot.toString().replace("UNSPEC","").replace("RESOURCE","").replace("{","").replace("}",""); - slotstring=slotstring.replace(" ",""); - //System.out.println(tmp); - //damit auch wirklich nur ?y und nicht ?y0 ersetzt wird, einfach nach "?y " suchen. 
- String[] array = slotstring.split(":"); - String replace; - if(array[0].length()<2)replace = "?"+array[0]+" "; - else replace="?"+array[0]; - - - //TODO: Hotfix: get rid of " PROPERTY " - String _ThingGettingURIfor_=array[1]; - _ThingGettingURIfor_=_ThingGettingURIfor_.replace(" PROPERTY ","").toLowerCase(); - String hm_result=getUriFromIndex(_ThingGettingURIfor_,0); - try - { - if(hm_result.contains("Category:")) hm_result=hm_result.replace("Category:",""); - } - catch ( Exception e ) - { - - } - - /*always the middle slot is the property - * so count and always take the second of third to become a property - */ - if(slotcounter%2==0){ - hm_result=getUriFromIndex(_ThingGettingURIfor_,1); - } - //set back to 0 to start new - if(slotcounter==3) slotcounter=0; - query=query.replace(replace, "<"+hm_result+">"); - query_upside_down=query_upside_down.replace(replace, "<"+hm_result+">"); - - } - lstquerupsidedown.add(query_upside_down); - lstquerynew.add(query); - - - - ArrayList<String> lsttmp=createLeftAndRightPropertyArray(query); - //if its lower than three, we dont have any conditions and dont need to check it. 
- //also if the size%3 isnt 0, than something else is wrong and we dont need to test the query - if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerynew.add(i); - else{ - lstquerynew.clear(); - lstquerynew.add("ERROR"); - } - - lsttmp.clear(); - lsttmp=createLeftAndRightPropertyArray(query_upside_down); - if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerupsidedown.add(i); - else{ - lstquerupsidedown.clear(); - lstquerupsidedown.add("ERROR"); - } - - lstquery.add(lstquerynew); - lstquery.add(lstquerupsidedown); - } return lstquery; } @@ -1351,6 +1209,10 @@ + /* + * TODO: if for example title,name,label is given, replace , and get for each thing the semantics + * + */ private static ArrayList<String> getSemantics (ArrayList<String> semantics) throws IOException, JWNLException { ArrayList<String> result = new ArrayList<String>(); //result.clear(); @@ -1492,24 +1354,23 @@ private String createAnswer(String string){ - string=string.replace("table",""); - string=string.replace("<tr>", ""); - string=string.replace("</tr>", ""); - string=string.replace("</>",""); - string=string.replace("<th>l</th>",""); - string=string.replace("<th>x</th>",""); - string=string.replace("< class=\"sparql\" border=\"1\">",""); - string=string.replace("\n",""); - string=string.replace(" ",""); - string=string.replace("</td>",""); - string=string.replace("<td>",""); - string=string.replace("<th>callret-0</th>", ""); - string=string.replace("<th>y</th>",""); - while (string.contains(" ")) string=string.replace(" ",""); - if (string.length()==0) string="EmtyAnswer"; - //System.out.println("Stringlänge: "+string.length()); - return string; + //<td>Klaus Wowereit</td> + + //get with regex all between <td> </td> + + Pattern p = Pattern.compile (".*<td>(.*)</td>.*"); + Matcher m = p.matcher (string); + String result=""; + while (m.find()) { + if(m.group(1)!=null) + result = result+" "+ m.group(1); + } + + if (result.length()==0) result="EmtyAnswer"; + + return 
result; + } @@ -1561,6 +1422,183 @@ +/* + * Backup original Iteration function + * + */ + +/* + * + * + // Is the function for the Case, you are in Iteration one and have only one triple of condition (s,p,o). + // @param querylist + // @param query + // @return a list with answers from the Server + // @throws SQLException + // @throws JWNLException + + private ArrayList<String> simpleIteration1Case(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + //asking server + String answer; + ArrayList<String> final_answer=new ArrayList<String>(); + + //First try the original query on the server. If that doesnt work, try it with Iteration + + answer=sendServerQuestionRequest(query); + + if(answer.contains("EmtyAnswer")){ + + String resource=""; + String property_to_compare_with=""; + String sideOfProperty="LEFT"; + + + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1){ + if(s.contains("LEFT")){ + sideOfProperty="LEFT"; + resource=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfProperty="RIGHT"; + resource=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_with=s.replace("PROPERTY",""); + } + + } + } + System.out.println("Property to compare:: "+ property_to_compare_with); + System.out.println("Resource: "+ resource); + //contains uri AND string, every second is the string + HashMap<String,String> properties = new HashMap<String, String>(); + GetRessourcePropertys property = new GetRessourcePropertys(); + Boolean goOnAfterProperty = true; + try { + properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); + if (properties==null){ + //final_answer.add("Error in getting Properties\n"); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + //System.out.println(properties); + } catch (IOException e) { + // TODO 
Auto-generated catch block + //e.printStackTrace(); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + + } + if(goOnAfterProperty==true){ + //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + double tmp=Levenshtein.computeLevenshteinDistance(property_to_compare_with.toLowerCase(), key); + + + //TODO: Implement Normalised levensthein + + if(tmp<=3.0){ + //alte property uri mit neuer ersetzen: + String query_tmp=query; + String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + //query_tmp=query_tmp.replace(test,properties.get(i-1)); + query_tmp=query_tmp.replace(test,value); + new_queries.add(query_tmp); + } + + } + + System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); + ArrayList<String> semantics=new ArrayList<String>(); + ArrayList<String> tmp_semantics=new ArrayList<String>(); + ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); + semantics.add(property_to_compare_with); + + //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child + String _temp_=myindex.getWordnetHelp(property_to_compare_with); + if(_temp_==null){ + tmp_semantics=semantics; + } + else{ + semantics.clear(); + semantics.add(_temp_); + tmp_semantics=semantics; + } + Boolean goOnAfterWordnet = true; + for(int i=0;i<=explorationdepthwordnet;i++){ + + try { + tmp_semantics=getSemantics(tmp_semantics); + if (tmp_semantics==null){ + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + else{ + //each word only one time + for(String k : tmp_semantics){ + if(!semantics.contains(k)) semantics.add(k); + } + } + + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + + } + + if(goOnAfterWordnet==true){ + + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + + for(String b : semantics){ + if(key.contains(b.toLowerCase())){ + if(!result_SemanticsMatchProperties.contains(key)){ + //create new query + result_SemanticsMatchProperties.add(key); + String query_tmp=query; + String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + query_tmp=query_tmp.replace(test,value); + System.out.println("New query after wordnet: "+ query_tmp); + new_queries.add(query_tmp); + } + } + } + } + + for(String bla : new_queries){ + String answer_tmp; + answer_tmp=sendServerQuestionRequest(bla); + System.out.println("Antwort vom Server: "+answer_tmp); + final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); + } + } + } + } + + return final_answer; + } + + */ + + + /** * Cluster function */ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-08 09:01:19
|
Revision: 3489 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3489&view=rev Author: sebastianwtr Date: 2011-12-08 09:01:12 +0000 (Thu, 08 Dec 2011) Log Message: ----------- [tbsl exploration] start changing project into following steps 1) Direct matching using only the Templator 2) Matching with Levenshtein Distanz 3) Matching with wordnet Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-07 22:23:04 UTC (rev 3488) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-08 09:01:12 UTC (rev 3489) @@ -223,20 +223,32 @@ */ //Iterration 1 if(getIterationdepth()==1&&startIterating==true){ - + /* //4, because of query + three conditions for the simple case if(querylist.size()==4)final_answer=simpleIteration1Case(querylist, query); //if we have more conditions, we need to change the way of replacing the uris got from wordnet etc + if(querylist.size()>4)final_answer=complexeIteration1Case(querylist, query); + */ + + if(querylist.size()==4)final_answer=simpleLevinstheinIteration(querylist, query); + if(querylist.size()>4)final_answer=complexeLevinstheinIteration(querylist, query); + } /* * ################################################################################################# */ //Iterration 2 + /* + * Only Levensthein!!! 
+ */ if(getIterationdepth()==2&&startIterating==true){ - + if(querylist.size()==4)final_answer=simpleWordnetIteration(querylist, query); + if(querylist.size()>4)final_answer=complexWordnetIteration(querylist, query); } + + } BufferedReader in = null; @@ -293,6 +305,626 @@ } + private ArrayList<String> simpleLevinstheinIteration(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + //asking server + String answer; + ArrayList<String> final_answer=new ArrayList<String>(); + String resource=""; + String property_to_compare_with=""; + String sideOfProperty="LEFT"; + + + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1){ + if(s.contains("LEFT")){ + sideOfProperty="LEFT"; + resource=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfProperty="RIGHT"; + resource=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_with=s.replace("PROPERTY",""); + } + + } + } + System.out.println("Property to compare:: "+ property_to_compare_with); + System.out.println("Resource: "+ resource); + //contains uri AND string, every second is the string + HashMap<String,String> properties = new HashMap<String, String>(); + GetRessourcePropertys property = new GetRessourcePropertys(); + Boolean goOnAfterProperty = true; + try { + /* + * TODO: Have to check now, if we need a right Property or a left one + */ + properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); + if (properties==null){ + //final_answer.add("Error in getting Properties\n"); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + //System.out.println(properties); + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + + } + 
if(goOnAfterProperty==true){ + //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + double tmp=Levenshtein.computeLevenshteinDistance(property_to_compare_with.toLowerCase(), key); + + /* + * TODO: Implement Normalised levensthein + */ + if(tmp<=3.0){ + //alte property uri mit neuer ersetzen: + String query_tmp=query; + String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + //query_tmp=query_tmp.replace(test,properties.get(i-1)); + query_tmp=query_tmp.replace(test,value); + new_queries.add(query_tmp); + } + } + + + for(String anfrage : new_queries){ + String answer_tmp; + answer_tmp=sendServerQuestionRequest(anfrage); + System.out.println("Antwort vom Server: "+answer_tmp); + final_answer.add("Begin:\n"+anfrage +"\n"+answer_tmp+" \n End"); + } + } + + + return final_answer; +} + + private ArrayList<String> complexeLevinstheinIteration(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + + /* + * in this case we need a inner and outer loop, because the properties are different from the resource or up side down + * Only for questions with two sets of conditions(s p o . 
s p o) + */ + + //asking server + String answer; + ArrayList<String> final_answer=new ArrayList<String>(); + String resourceOne=""; + String property_to_compare_withOne=""; + String resourceTwo=""; + String property_to_compare_withTwo=""; + String sideOfPropertyOne="LEFT"; + String sideOfPropertyTwo="LEFT"; + + + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1&&tmpcounter<=4){ + if(s.contains("LEFT")){ + sideOfPropertyOne="LEFT"; + resourceOne=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfPropertyOne="RIGHT"; + resourceOne=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_withOne=s.replace("PROPERTY",""); + } + + } + if(tmpcounter>4){ + if(s.contains("LEFT")){ + sideOfPropertyOne="LEFT"; + resourceTwo=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfPropertyOne="RIGHT"; + resourceTwo=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_withTwo=s.replace("PROPERTY",""); + } + + } + } + HashMap<String,String> propertiesOne = new HashMap<String, String>(); + HashMap<String,String> propertiesTwo = new HashMap<String, String>(); + GetRessourcePropertys property = new GetRessourcePropertys(); + Boolean goOnAfterProperty = true; + try { + /* + * TODO: Have to check now, if we need a right Property or a left one + */ + propertiesOne=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyOne); + if (propertiesOne==null){ + //final_answer.add("Error in getting Properties\n"); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + //System.out.println(properties); + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + + } + try { + /* + * TODO: Have to check now, if 
we need a right Property or a left one + */ + propertiesTwo=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyTwo); + if (propertiesOne==null){ + //final_answer.add("Error in getting Properties\n"); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + //System.out.println(properties); + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + + } + + if(goOnAfterProperty==true){ + //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); + + for (Entry<String, String> entryOne : propertiesOne.entrySet()) { + + String queryOne=query; + String keyOne = entryOne.getKey(); + String valueOne = entryOne.getValue(); + double levnstheinDistanzeOne=Levenshtein.computeLevenshteinDistance(property_to_compare_withOne.toLowerCase(), keyOne); + if(levnstheinDistanzeOne<=3.0){ + String test = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); + queryOne=queryOne.replace(test,valueOne); + } + + for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { + String keyTwo = entryTwo.getKey(); + String valueTwo = entryTwo.getValue(); + double levnstheinDistanzeTwo=Levenshtein.computeLevenshteinDistance(property_to_compare_withTwo.toLowerCase(), keyTwo); + + /* + * TODO: Implement Normalised levensthein + */ + if(levnstheinDistanzeTwo<=3.0){ + //alte property uri mit neuer ersetzen: + String queryTwo=queryOne; + String test = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); + queryTwo=queryTwo.replace(test,valueTwo); + new_queries.add(queryTwo); + } + + } + } + + + + for(String anfrage : new_queries){ + String answer_tmp; + answer_tmp=sendServerQuestionRequest(anfrage); + 
System.out.println("Antwort vom Server: "+answer_tmp); + final_answer.add("Begin:\n"+anfrage +"\n"+answer_tmp+" \n End"); + } + + } + + return final_answer; +} + + private ArrayList<String> simpleWordnetIteration(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + //asking server + String answer; + ArrayList<String> final_answer=new ArrayList<String>(); + + /* + * First try the original query on the server. If that doesnt work, try it with Iteration + */ + answer=sendServerQuestionRequest(query); + + if(answer.contains("EmtyAnswer")){ + + String resource=""; + String property_to_compare_with=""; + String sideOfProperty="LEFT"; + + + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1){ + if(s.contains("LEFT")){ + sideOfProperty="LEFT"; + resource=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfProperty="RIGHT"; + resource=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_with=s.replace("PROPERTY",""); + } + + } + } + System.out.println("Property to compare:: "+ property_to_compare_with); + System.out.println("Resource: "+ resource); + //contains uri AND string, every second is the string + HashMap<String,String> properties = new HashMap<String, String>(); + GetRessourcePropertys property = new GetRessourcePropertys(); + Boolean goOnAfterProperty = true; + try { + /* + * TODO: Have to check now, if we need a right Property or a left one + */ + properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); + if (properties==null){ + //final_answer.add("Error in getting Properties\n"); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + //System.out.println(properties); + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + + final_answer.add("Begin:\n"+query +"\nError 
in getting Properties \n End"); + goOnAfterProperty=false; + + } + if(goOnAfterProperty==true){ + //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); + + System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); + ArrayList<String> semantics=new ArrayList<String>(); + ArrayList<String> tmp_semantics=new ArrayList<String>(); + ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); + semantics.add(property_to_compare_with); + + //first check, if there is a singular form in the wordnet dictionary.. eg children -> child + String _temp_=myindex.getWordnetHelp(property_to_compare_with); + if(_temp_==null){ + tmp_semantics=semantics; + } + else{ + semantics.clear(); + semantics.add(_temp_); + tmp_semantics=semantics; + } + Boolean goOnAfterWordnet = true; + for(int i=0;i<=explorationdepthwordnet;i++){ + + try { + tmp_semantics=getSemantics(tmp_semantics); + if (tmp_semantics==null){ + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + else{ + //each word only one time + for(String k : tmp_semantics){ + if(!semantics.contains(k)) semantics.add(k); + } + } + + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + + } + + if(goOnAfterWordnet==true){ + + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + + for(String b : semantics){ + if(key.contains(b.toLowerCase())){ + if(!result_SemanticsMatchProperties.contains(key)){ + //create new query + result_SemanticsMatchProperties.add(key); + String query_tmp=query; + 
String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + query_tmp=query_tmp.replace(test,value); + System.out.println("New query after wordnet: "+ query_tmp); + new_queries.add(query_tmp); + } + } + } + } + + for(String bla : new_queries){ + String answer_tmp; + answer_tmp=sendServerQuestionRequest(bla); + System.out.println("Antwort vom Server: "+answer_tmp); + final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); + } + } + } + } + + return final_answer; +} + + + private ArrayList<String> complexWordnetIteration(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + //asking server + String answer; + ArrayList<String> final_answer=new ArrayList<String>(); + + /* + * First try the original query on the server. If that doesnt work, try it with Iteration + */ + answer=sendServerQuestionRequest(query); + + if(answer.contains("EmtyAnswer")){ + + String resourceOne=""; + String property_to_compare_withOne=""; + String resourceTwo=""; + String property_to_compare_withTwo=""; + String sideOfPropertyOne="LEFT"; + String sideOfPropertyTwo="LEFT"; + + + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1&&tmpcounter<=4){ + if(s.contains("LEFT")){ + sideOfPropertyOne="LEFT"; + resourceOne=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfPropertyOne="RIGHT"; + resourceOne=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_withOne=s.replace("PROPERTY",""); + } + + } + if(tmpcounter>4){ + if(s.contains("LEFT")){ + sideOfPropertyOne="LEFT"; + resourceTwo=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfPropertyOne="RIGHT"; + resourceTwo=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_withTwo=s.replace("PROPERTY",""); + } + + } + } + System.out.println("Property to compare:: "+ property_to_compare_withOne); + System.out.println("Resource: "+ 
resourceOne); + //contains uri AND string, every second is the string + HashMap<String,String> propertiesOne = new HashMap<String, String>(); + HashMap<String,String> propertiesTwo = new HashMap<String, String>(); + GetRessourcePropertys property = new GetRessourcePropertys(); + Boolean goOnAfterProperty = true; + try { + /* + * TODO: Have to check now, if we need a right Property or a left one + */ + propertiesOne=property.getPropertys(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyOne); + propertiesTwo=property.getPropertys(getUriFromIndex(resourceTwo.toLowerCase(),0),sideOfPropertyTwo); + if (propertiesOne==null){ + //final_answer.add("Error in getting Properties\n"); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + //System.out.println(properties); + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + + } + if(goOnAfterProperty==true){ + + /* + * #################################### Semantics One############################################# + */ + //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); + + System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); + ArrayList<String> semanticsOne=new ArrayList<String>(); + ArrayList<String> tmp_semanticsOne=new ArrayList<String>(); + ArrayList<String> result_SemanticsMatchPropertiesOne=new ArrayList<String>(); + semanticsOne.add(property_to_compare_withOne); + + //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child + String _temp_One=myindex.getWordnetHelp(property_to_compare_withOne); + if(_temp_One==null){ + tmp_semanticsOne=semanticsOne; + } + else{ + semanticsOne.clear(); + semanticsOne.add(_temp_One); + tmp_semanticsOne=semanticsOne; + } + Boolean goOnAfterWordnet = true; + for(int i=0;i<=explorationdepthwordnet;i++){ + + try { + tmp_semanticsOne=getSemantics(tmp_semanticsOne); + if (tmp_semanticsOne==null){ + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); + + } + else{ + //each word only one time + for(String k : tmp_semanticsOne){ + if(!semanticsOne.contains(k)) semanticsOne.add(k); + } + } + + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); + + } + + + } + /* + * #################################### Semantics Two############################################# + */ + + System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); + ArrayList<String> semanticsTwo=new ArrayList<String>(); + ArrayList<String> tmp_semanticsTwo=new ArrayList<String>(); + ArrayList<String> result_SemanticsMatchPropertiesTwo=new ArrayList<String>(); + semanticsTwo.add(property_to_compare_withTwo); + + //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child + String _temp_Two=myindex.getWordnetHelp(property_to_compare_withTwo); + if(_temp_Two==null){ + tmp_semanticsOne=semanticsTwo; + } + else{ + semanticsTwo.clear(); + semanticsTwo.add(_temp_Two); + tmp_semanticsTwo=semanticsTwo; + } + + for(int i=0;i<=explorationdepthwordnet;i++){ + + try { + tmp_semanticsTwo=getSemantics(tmp_semanticsTwo); + if (tmp_semanticsTwo==null){ + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); + + } + else{ + //each word only one time + for(String k : tmp_semanticsTwo){ + if(!semanticsTwo.contains(k)) semanticsTwo.add(k); + } + } + + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); + + } + + + } + + + if(goOnAfterWordnet==true){ + + for (Entry<String, String> entryOne : propertiesOne.entrySet()) { + String keyOne = entryOne.getKey(); + String valueOne = entryOne.getValue(); + String queryOne=query; + + for(String b : semanticsOne){ + if(keyOne.contains(b.toLowerCase())){ + if(!result_SemanticsMatchPropertiesOne.contains(keyOne)){ + //create new query + result_SemanticsMatchPropertiesOne.add(keyOne); + String replacementOne = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); + queryOne=queryOne.replace(replacementOne,valueOne); + + for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { + String keyTwo = entryTwo.getKey(); + String valueTwo = entryTwo.getValue(); + + for(String z : semanticsTwo){ + if(keyTwo.contains(z.toLowerCase())){ + if(!result_SemanticsMatchPropertiesTwo.contains(keyTwo)){ + //create new query + result_SemanticsMatchPropertiesTwo.add(keyTwo); + String queryTwo=queryOne; + String replacementTwo = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); + queryTwo=queryTwo.replace(replacementTwo,valueTwo); + 
System.out.println("New query after wordnet: "+ queryTwo); + new_queries.add(queryTwo); + } + } + } + } + + } + } + } + + + + } + + + + + for(String bla : new_queries){ + String answer_tmp; + answer_tmp=sendServerQuestionRequest(bla); + System.out.println("Antwort vom Server: "+answer_tmp); + final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); + } + } + } + } + + return final_answer; +} + + /** * Is the function for the Case, you are in Iteration one and have only one triple of condition (s,p,o). * @param querylist @@ -463,6 +1095,13 @@ return final_answer; } + + + + + + + /** * Iterates thru the conditions and returns an array, where one can see, if the Property is left or right from the resource Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java 2011-12-07 22:23:04 UTC (rev 3488) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java 2011-12-08 09:01:12 UTC (rev 3489) @@ -25,31 +25,10 @@ * in Eclipse Run -> RunConfigurations -> Arguments -> VM Arguments -> -Xmx1024m */ -/* - * - * - * for pom.xml file - * <dependencies> - <dependency> - <groupId>org.xerial</groupId> - <artifactId>sqlite-jdbc</artifactId> - <version>3.6.16</version> - </dependency> - </dependencies> - */ -// Sax example from http://www.bennyn.de/programmierung/java/java-xml-sax-parser.html -/* - * - * eins:http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=SELECT+DISTINCT+%3Fx+%3Fl++WHERE+{%0D%0A++%3Fx+rdf%3Atype+%3Fc+.%0D%0A++%3Fx+rdfs%3Alabel+%3Fl+.%0D%0A++FILTER+%28lang%28%3Fl%29+%3D+%27en%27%29%0D%0A}&format=text%2Fhtml&debug=on&timeout= - * 
zwei:http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=SELECT+DISTINCT+%3Fc+%3Fl++WHERE+{%0D%0A++%3Fx+rdf%3Atype+%3Fc+.%0D%0A++%3Fc+rdfs%3Alabel+%3Fl+.%0D%0A++FILTER+%28lang%28%3Fl%29+%3D+%27en%27%29%0D%0A}&format=text%2Fhtml&debug=on&timeout= - * - */ public class exploration_main { - //private static HashMap<String, String> hm = new HashMap<String, String>(); - private static String qaldEntity2="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=SELECT+DISTINCT+%3Fc+%3Fl++WHERE+{%0D%0A++%3Fx+rdf%3Atype+%3Fc+.%0D%0A++%3Fc+rdfs%3Alabel+%3Fl+.%0D%0A++FILTER+%28lang%28%3Fl%29+%3D+%27en%27%29%0D%0A}&format=text%2Fhtml&debug=on&timeout="; - private static String qaldEntity1="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=SELECT+DISTINCT+%3Fx+%3Fl++WHERE+{%0D%0A++%3Fx+rdf%3Atype+%3Fc+.%0D%0A++%3Fx+rdfs%3Alabel+%3Fl+.%0D%0A++FILTER+%28lang%28%3Fl%29+%3D+%27en%27%29%0D%0A}&format=text%2Fhtml&debug=on&timeout="; + /** * @param args * @throws IOException @@ -64,24 +43,12 @@ * Do the starting initializing stuff */ long startInitTime = System.currentTimeMillis(); - System.out.println("Start Indexing"); - - //For testing! - //hm=ParseXmlHtml.parse_xml("/home/swalter/workspace/qaldEntity2",hm); - //hm=ParseXmlHtml.parse_xml("/home/swalter/workspace/qaldEntity1",hm); - - //30% Ram - /* - * For real use! 
- */ - /*hm=ParseXmlHtml.parse_xml((getEntity(qaldEntity2,"/tmp/qaldEntity2")),hm); - System.out.println("Entity2 done"); - hm=ParseXmlHtml.parse_xml((getEntity(qaldEntity1,"/tmp/qaldEntity1")),hm); - System.out.println("Entity1 done");*/ - System.out.println("Done with indexing\n"); - System.out.println("Start generating Wordnet Dictionary"); + + /* + * Create Sparql Object + */ SparqlObject sparql = new SparqlObject(); - System.out.println("Generating Wordnet Dictionary Done"); + long stopInitTime = System.currentTimeMillis(); System.out.println("Time for Initialising "+(stopInitTime-startInitTime)+" ms"); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-07 22:23:10
|
Revision: 3488 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3488&view=rev Author: lorenz_b Date: 2011-12-07 22:23:04 +0000 (Wed, 07 Dec 2011) Log Message: ----------- Continued greedy algorithm implementation to build cohaerent ontology. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2011-12-07 21:50:12 UTC (rev 3487) +++ trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2011-12-07 22:23:04 UTC (rev 3488) @@ -16,6 +16,8 @@ import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.reasoner.OWLReasoner; +import com.clarkparsia.modularity.IncrementalClassifier; + public class GreedyCohaerencyExtractor { public GreedyCohaerencyExtractor() { @@ -23,6 +25,9 @@ } public OWLOntology getCoharentOntology(OWLOntology ontology) throws OWLOntologyCreationException{ + IncrementalClassifier reasoner = new IncrementalClassifier(ontology); + reasoner.classify(); + BidiMap<AxiomType<? extends OWLAxiom>, Integer> axiomType2CountMap = getAxiomTypeCount(ontology); Map<AxiomType<? extends OWLAxiom>, List<OWLAxiom>> axiomType2AxiomsMap = new HashMap<AxiomType<? extends OWLAxiom>, List<OWLAxiom>>(); @@ -33,12 +38,22 @@ int lcm = lcm(new ArrayList<Integer>(axiomType2CountMap.values())); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + man.addOntologyChangeListener(reasoner); OWLOntology cohaerentOntology = man.createOntology(); + boolean isCohaerent = true; for(int i = 0; i < lcm; i++){ - for(Entry<AxiomType<? 
extends OWLAxiom>, Integer> entry : axiomType2CountMap.entrySet()){ - if((i % entry.getValue()) == 0){ - man.addAxiom(cohaerentOntology, axiomType2AxiomsMap.get(entry.getKey()).remove(0)); + if(isCohaerent){ + for(Entry<AxiomType<? extends OWLAxiom>, Integer> entry : axiomType2CountMap.entrySet()){ + if((i % entry.getValue()) == 0){ + OWLAxiom ax = axiomType2AxiomsMap.get(entry.getKey()).remove(0); + man.addAxiom(cohaerentOntology, ax); + isCohaerent = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().isEmpty(); + if(!isCohaerent){ + man.removeAxiom(cohaerentOntology, ax); + break; + } + } } } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-07 21:50:18
|
Revision: 3487 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3487&view=rev Author: lorenz_b Date: 2011-12-07 21:50:12 +0000 (Wed, 07 Dec 2011) Log Message: ----------- Started greedy algorithm implementation to build cohaerent ontology. Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java Added: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java 2011-12-07 21:50:12 UTC (rev 3487) @@ -0,0 +1,99 @@ +package org.dllearner.utilities; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.collections15.BidiMap; +import org.apache.commons.collections15.bidimap.DualHashBidiMap; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.AxiomType; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.reasoner.OWLReasoner; + +public class GreedyCohaerencyExtractor { + + public GreedyCohaerencyExtractor() { + // TODO Auto-generated constructor stub + } + + public OWLOntology getCoharentOntology(OWLOntology ontology) throws OWLOntologyCreationException{ + BidiMap<AxiomType<? extends OWLAxiom>, Integer> axiomType2CountMap = getAxiomTypeCount(ontology); + + Map<AxiomType<? extends OWLAxiom>, List<OWLAxiom>> axiomType2AxiomsMap = new HashMap<AxiomType<? extends OWLAxiom>, List<OWLAxiom>>(); + for(AxiomType<? 
extends OWLAxiom> type : AxiomType.AXIOM_TYPES){ + axiomType2AxiomsMap.put(type, new ArrayList<OWLAxiom>(ontology.getAxioms(type))); + } + + int lcm = lcm(new ArrayList<Integer>(axiomType2CountMap.values())); + + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLOntology cohaerentOntology = man.createOntology(); + + for(int i = 0; i < lcm; i++){ + for(Entry<AxiomType<? extends OWLAxiom>, Integer> entry : axiomType2CountMap.entrySet()){ + if((i % entry.getValue()) == 0){ + man.addAxiom(cohaerentOntology, axiomType2AxiomsMap.get(entry.getKey()).remove(0)); + } + } + } + return cohaerentOntology; + } + + public OWLOntology getCoharentOntology(OWLReasoner reasoner) throws OWLOntologyCreationException{ + return getCoharentOntology(reasoner.getRootOntology()); + } + + private BidiMap<AxiomType<? extends OWLAxiom>, Integer> getAxiomTypeCount(OWLOntology ontology){ + BidiMap<AxiomType<? extends OWLAxiom>, Integer> axiomType2CountMap = new DualHashBidiMap<AxiomType<? extends OWLAxiom>, Integer>(); + + for(AxiomType<? 
extends OWLAxiom> type : AxiomType.AXIOM_TYPES){ + axiomType2CountMap.put(type, ontology.getAxiomCount(type)); + } + + return axiomType2CountMap; + } + + private int lcm(int x1,int x2) { + if(x1<=0 || x2<=0) { + throw new IllegalArgumentException("Cannot compute the least "+ + "common multiple of two "+ + "numbers if one, at least,"+ + "is negative."); + } + int max,min; + if (x1>x2) { + max = x1; + min = x2; + } else { + max = x2; + min = x1; + } + for(int i=1; i<=min; i++) { + if( (max*i)%min == 0 ) { + return i*max; + } + } + throw new Error("Cannot find the least common multiple of numbers "+ + x1+" and "+x2); + } + + private int lcm(List<Integer> values) { + if(values.size() == 1){ + return values.get(0); + } else { + List<Integer> list = new ArrayList<Integer>(); + list.add(lcm(values.get(0), values.get(1))); + if(values.size() > 2){ + list.addAll(values.subList(2, values.size())); + } + return lcm(list); + } + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/utilities/GreedyCohaerencyExtractor.java ___________________________________________________________________ Added: svn:mime-type + text/plain This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-07 16:20:49
|
Revision: 3486 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3486&view=rev Author: sebastianwtr Date: 2011-12-07 16:20:39 +0000 (Wed, 07 Dec 2011) Log Message: ----------- [tbsl exploration] Split Interation1 into two different cases and did the "simple" case . Modified the SQLite functions. And changed other functions to get it all running. Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-07 16:15:38 UTC (rev 3485) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/GetRessourcePropertys.java 2011-12-07 16:20:39 UTC (rev 3486) @@ -9,34 +9,62 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import org.dllearner.algorithm.tbsl.exploration.sax.MySaxParser; public class GetRessourcePropertys { - public ArrayList<String> getPropertys(String element) throws IOException{ - try{ - sendServerPropertyRequest(element); - return do_parsing("answer_property"); - } catch (Exception e){ - return null; - } + public HashMap<String,String> getPropertys(String element, String side) throws IOException{ + + return sendServerPropertyRequest(element,side); + } + /** * Get an uri and saves the properties of this resource * @param vergleich + * @return * @throws IOException */ - private void sendServerPropertyRequest(String vergleich) throws IOException{ + 
private HashMap<String,String> sendServerPropertyRequest(String vergleich, String side) throws IOException{ + /* + * + * For the second Iteration, I can just add the sparql property here. + */ - String bla123 = vergleich; + /* + * + * SELECT DISTINCT ?p WHERE {<http://dbpedia.org/resource/Berlin> ?y ?p.} für Berlin links der Property + * PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {<http://dbpedia.org/resource/Berlin> ?p ?y. ?p rdfs:label ?s.} + * + * SELECT DISTINCT ?p WHERE {?y ?p <http://dbpedia.org/resource/Berlin>.} für Berlin rechts der Property + * PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {?y ?p <http://dbpedia.org/resource/Berlin>. ?p rdfs:label ?s.} + * http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=PREFIX+rdfs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E+SELECT+DISTINCT+%3Fs+%3Fp+WHERE+{%3Fy+%3Fp+%3Chttp%3A%2F%2Fdbpedia.org%2Fresource%2FBerlin%3E.+%3Fp+rdfs%3Alabel+%3Fs.}&format=text%2Fhtml&debug=on&timeout= + */ + + String vergleichorig = vergleich; + /*String bla123 = vergleich; //to get only the name bla123=bla123.replace("http://dbpedia.org/resource/Category:",""); bla123=bla123.replace("http://dbpedia.org/resource/",""); - vergleich=bla123; - String tmp="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=PREFIX+rdfs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%0D%0APREFIX+res%3A+%3Chttp%3A%2F%2Fdbpedia.org%2Fresource%2F%3E%0D%0A%0D%0ASELECT+DISTINCT+%3Fp+%3Fl+WHERE++{%0D%0A+{+res%3A"+vergleich+"+%3Fp+%3Fo+.+}%0D%0A+UNION%0D%0A+{+%3Fs+%3Fp+res%3A"+vergleich+"+.+}%0D%0A+{+%3Fp+rdfs%3Alabel+%3Fl+.+}%0D%0A}%0D%0A&format=text%2Fhtml&debug=on&timeout="; + vergleich=bla123;*/ + + String tmp_left="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {?y 
?p <"+vergleichorig+">. ?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; + //System.out.println("property right!!! : " +tmp_right); + String tmp_right="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query="+createServerRequest("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> SELECT DISTINCT ?s ?p WHERE {<"+vergleichorig+"> ?p ?y. ?p rdfs:label ?s.}")+"%0D%0A&format=text%2Fhtml&debug=on&timeout="; + + String verarbeitungsstring=null; + if(side.contains("RIGHT")) verarbeitungsstring=tmp_right; + if(side.contains("LEFT")) verarbeitungsstring=tmp_left; + + //just in case..... + if(!side.contains("LEFT") && !side.contains("RIGHT")) verarbeitungsstring=tmp_left; + + //String verarbeitungsstring="http://greententacle.techfak.uni-bielefeld.de:5171/sparql?default-graph-uri=&query=PREFIX+rdfs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%0D%0APREFIX+res%3A+%3Chttp%3A%2F%2Fdbpedia.org%2Fresource%2F%3E%0D%0A%0D%0ASELECT+DISTINCT+%3Fp+%3Fl+WHERE++{%0D%0A+{+res%3A"+vergleich+"+%3Fp+%3Fo+.+}%0D%0A+UNION%0D%0A+{+%3Fs+%3Fp+res%3A"+vergleich+"+.+}%0D%0A+{+%3Fp+rdfs%3Alabel+%3Fl+.+}%0D%0A}%0D%0A&format=text%2Fhtml&debug=on&timeout="; URL url; InputStream is; InputStreamReader isr; @@ -45,7 +73,7 @@ String result=""; try { - url = new URL(tmp); + url = new URL(verarbeitungsstring); is = url.openStream(); isr = new InputStreamReader(is); r = new BufferedReader(isr); @@ -60,9 +88,29 @@ System.out.println("Can not connect"); } - FileWriter w = new FileWriter("answer_property"); + /* FileWriter w = new FileWriter("answer_property"); w.write(result); w.close(); + */ + + HashMap<String,String> hm = new HashMap(); + result=result.replace("<th>s</th>",""); + result=result.replace("<th>p</th>",""); + result=result.replace("<table class=\"sparql\" border=\"1\">",""); + result=result.replace("<tr>",""); + result=result.replace("</tr>",""); + result=result.replace("\n", ""); + result=result.replace(" ", ""); + 
result=result.replaceFirst("<td>", ""); + + String[] tmp_array=result.split("</td><td>"); + + for(int i =1; i<=tmp_array.length-2;i=i+2) { + hm.put(tmp_array[i-1].toLowerCase(), tmp_array[i]); + //System.out.println(tmp_array[i-1].toLowerCase() + " " +tmp_array[i]); + } + + return hm; } @@ -85,4 +133,47 @@ return indexObject; } + + + + private String createServerRequest(String query){ + String anfrage=null; + anfrage=removeSpecialKeys(query); + anfrage=anfrage.replace("<","<"); + anfrage=anfrage.replace("%gt;",">"); + anfrage=anfrage.replace("&","&"); + //anfrage=anfrage.replaceAll("#>","%23%3E%0D%0A%"); + anfrage=anfrage.replace("#","%23"); + anfrage=anfrage.replace(" ","+"); + anfrage=anfrage.replace("/","%2F"); + anfrage=anfrage.replace(":","%3A"); + anfrage=anfrage.replace("?","%3F"); + anfrage=anfrage.replace("$","%24"); + //anfrage=anfrage.replaceAll("F>+","F%3E%0D%0A"); + anfrage=anfrage.replace(">","%3E"); + anfrage=anfrage.replace("<","%3C"); + anfrage=anfrage.replace("\"","%22"); + anfrage=anfrage.replace("\n","%0D%0A%09"); + anfrage=anfrage.replace("%%0D%0A%09","%09"); + anfrage=anfrage.replace("=","%3D"); + anfrage=anfrage.replace("@","%40"); + anfrage=anfrage.replace("&","%26"); + anfrage=anfrage.replace("(","%28"); + anfrage=anfrage.replace(")","%29"); + anfrage=anfrage.replace("%3E%0D%0A%25","%3E"); + //anfrage=anfrage.replaceAll("\n",".%0D%0A%09"); + return anfrage; + } + + private String removeSpecialKeys(String query){ + query=query.replace("\\",""); + //query=query.replaceAll("\a",""); + query=query.replace("\b",""); + query=query.replace("\f",""); + query=query.replace("\r",""); + query=query.replace("\t",""); + // query=query.replaceAll("\v",""); + return query; + } + } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- 
trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-07 16:15:38 UTC (rev 3485) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2011-12-07 16:20:39 UTC (rev 3486) @@ -15,9 +15,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import java.util.Set; import net.didion.jwnl.JWNLException; +import net.didion.jwnl.data.POS; import org.dllearner.algorithm.tbsl.exploration.sax.ParseXmlHtml; import org.dllearner.algorithm.tbsl.nlp.WordNet; @@ -57,18 +61,15 @@ //Konstruktor public SparqlObject() throws MalformedURLException, ClassNotFoundException, SQLException{ - wordnet = new WordNet(); - //hm=hm_new; - /*hm=ParseXmlHtml.parse_xml("/home/swalter/workspace/qaldEntity2",hm); - hm=ParseXmlHtml.parse_xml("/home/swalter/workspace/qaldEntity1",hm);*/ + this.wordnet = new WordNet(); System.out.println("Loading SPARQL Templator"); - btemplator = new BasicTemplator(); - templator = new Templator(); + this.btemplator = new BasicTemplator(); + this.templator = new Templator(); System.out.println("Loading SPARQL Templator Done\n"); - System.out.println("Start Indexing Wikipedia URI's"); - myindex = new mySQLDictionary(); + System.out.println("Start Indexing"); + this.myindex = new mySQLDictionary(); - System.out.println("Done:Indexing Wikipedia URI's"); + System.out.println("Done:Indexing"); setExplorationdepthwordnet(1); //eigentlich immer mit 0 initialisieren setIterationdepth(1); @@ -128,17 +129,23 @@ System.out.println("The Questionparsing took "+ (endParsingTime-startParsingTime)+ " ms"); ArrayList<String> final_answer = new ArrayList<String>(); - //if(!lstquery.isEmpty()){ + if(lstquery.isEmpty()){ + saveNotParsedQuestions(question); + } //for each querry + //TODO: Add function that no qery is send to the server, if querylist==null 
for(ArrayList<String> querylist : lstquery){ - /* - * ################################################################################################# - */ - //only testfunction to save the generated queries in the tmp-folder + boolean startIterating=true; String query=""; - query=querylist.get(0).toString(); - if(getIterationdepth()==-1){ + if(querylist.get(0).contains("ERROR"))startIterating=false; + else query=querylist.get(0).toString(); + + //TODO: Somewhere is an error, because sometimes there is an double _ a __ and thats not allowed. + //fixing it now with an replace of "__" to "" + query=query.replace("__", ""); + + if(getIterationdepth()==-1&&startIterating==true){ String tmp = new String(); String s = null; BufferedReader in = null; @@ -181,7 +188,7 @@ * ################################################################################################# */ //Iteration 0 - if(getIterationdepth()==0){ + if(getIterationdepth()==0&&startIterating==true){ String tmp = new String(); String s = null; BufferedReader in = null; @@ -208,170 +215,26 @@ } String answer; answer=sendServerQuestionRequest(query); - final_answer.add(answer); - /*System.out.println(query); - if (query=="" || query==" "||query.length()==0) answer="Could not parse"; - System.out.println("Antwort: " + answer); - String out=tmp + "\n" + "Question: "+question + "\n"+"Query: " + query +"\n Anwer: "+answer+"\n\n##############################"; - - BufferedWriter outfile = new BufferedWriter( - new OutputStreamWriter( - new FileOutputStream( "/tmp/answer.txt" ) ) ); - - outfile.write(out); - outfile.close(); */ + final_answer.add("Begin:\n"+query +"\n"+answer+" \n End"); + } /* * ################################################################################################# */ //Iterration 1 - if(getIterationdepth()==1){ + if(getIterationdepth()==1&&startIterating==true){ - //asking server - String answer; - answer=sendServerQuestionRequest(query); - System.out.println(query); - //if Emty 
answer, get properties an look up the right property with levensthein - if(answer.contains("EmtyAnswer")){ - //TODO: get all information from the query - //TODO: maybe put the query + information in an array list of arraylist. each arraylist contains the query, the variables and the uris. Then iterate over the List and get the query for sending to server - String rescource=""; - - //get the resource of the query. always the last Item in the array! - //Funktioniert! - String resource_tmp=""; - int tmp_length=querylist.size(); - resource_tmp=querylist.get(tmp_length-1); - String[] array_tmp = resource_tmp.split(":"); - rescource=array_tmp[1]; - - - //the property we are looking for is always the second last in the array! - //Funktioniert! - String property_to_compare_with=""; - tmp_length=querylist.size(); - //second last - property_to_compare_with=querylist.get(tmp_length-2); - array_tmp = property_to_compare_with.split(":"); - property_to_compare_with=array_tmp[1]; - //System.out.println("property_to_compare_with: "+property_to_compare_with); - - - //contains uri AND string, every second is the string - //Funktioniert - ArrayList<String> properties = new ArrayList<String>(); - GetRessourcePropertys property = new GetRessourcePropertys(); - Boolean goOnAfterProperty = true; - try { - //using uri now, not the string - //properties=property.getPropertys(hm.get(rescource.toLowerCase())); - properties=property.getPropertys(getUriFromIndex(rescource.toLowerCase(),0)); - if (properties==null){ - final_answer.add("Error in getting Properties\n"); - goOnAfterProperty=false; - } - //System.out.println(properties); - } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - final_answer.add("Error in getting Properties\n"); - goOnAfterProperty=false; - - } - if(goOnAfterProperty==true){ - //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. 
- Levenshtein levensthein = new Levenshtein(); - ArrayList<String> new_queries= new ArrayList<String>(); - for(int i =1; i<=properties.size()-2;i=i+2){ - //double tmp=levensthein.nld(property_to_compare_with.toLowerCase(), properties.get(i).toLowerCase()); - double tmp=levensthein.computeLevenshteinDistance(property_to_compare_with.toLowerCase(), properties.get(i).toLowerCase()); - //create new query - //System.out.println(tmp); - //if(tmp>=LvenstheinMin){ - if(tmp<=3.0){ - //System.out.println(tmp); - //alte property uri mit neuer ersetzen: - String query_tmp=query; - //query_tmp=query_tmp.replace(hm.get(property_to_compare_with.toLowerCase()),properties.get(i-1)); - query_tmp=query_tmp.replace(getUriFromIndex(property_to_compare_with.toLowerCase(),1),properties.get(i-1)); - //System.out.println("hm.get(property_to_compare_with.toLowerCase(): " + hm.get(property_to_compare_with.toLowerCase())); - new_queries.add(query_tmp); - } - - } - - System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); - ArrayList<String> semantics=new ArrayList<String>(); - ArrayList<String> tmp_semantics=new ArrayList<String>(); - ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); - semantics.add(property_to_compare_with); - tmp_semantics=semantics; - Boolean goOnAfterWordnet = true; - for(int i=0;i<=explorationdepthwordnet;i++){ - - try { - tmp_semantics=getSemantics(tmp_semantics); - if (tmp_semantics==null){ - goOnAfterWordnet=false; - final_answer.add("Error in searching Wordnet\n"); - } - else{ - //each word only one time - for(String k : tmp_semantics){ - if(!semantics.contains(k)) semantics.add(k); - } - } - - } catch (IOException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - goOnAfterWordnet=false; - final_answer.add("Error in searching Wordnet\n"); - - } - - } - - if(goOnAfterWordnet==true){ - // ArrayList<String> new_queries= new ArrayList<String>(); - - //TODO: Try, if 
it works, if you use only one loop: (b.lowerCase).contains(properties.get(h)) - for(int h=1;h<properties.size()-2;h=h+2){ - for(String b : semantics){ - //System.out.println(properties.get(h)); - //System.out.println(b); - if(properties.get(h).contains(b.toLowerCase())){ - if(!result_SemanticsMatchProperties.contains(properties.get(h))){ - //create new query - result_SemanticsMatchProperties.add(properties.get(h)); - String query_tmp=query; - - //query_tmp=query_tmp.replace(hm.get(property_to_compare_with.toLowerCase()),properties.get(h-1)); - query_tmp=query_tmp.replace(getUriFromIndex(property_to_compare_with.toLowerCase(),1),properties.get(h-1)); - //System.out.println("hm.get(property_to_compare_with.toLowerCase(): " + hm.get(property_to_compare_with.toLowerCase())); - new_queries.add(query_tmp); - } - } - } - } - - for(String bla : new_queries){ - String answer_tmp; - answer_tmp=sendServerQuestionRequest(bla); - if(!answer_tmp.contains("EmtyAnswer")){ - final_answer.add(answer_tmp); - } - } - } - } - } + //4, because of query + three conditions for the simple case + if(querylist.size()==4)final_answer=simpleIteration1Case(querylist, query); + //if we have more conditions, we need to change the way of replacing the uris got from wordnet etc + } /* * ################################################################################################# */ //Iterration 2 - if(getIterationdepth()==2){ + if(getIterationdepth()==2&&startIterating==true){ } } @@ -403,9 +266,21 @@ String out=""; for(String answer : final_answer){ + //only answered question + // if(!answer.contains("Error in searching Wordnet with word") && !answer.contains("EmtyAnswer")&& !answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; + + /* + //only questions with wordnet error + if(answer.contains("Error in searching Wordnet with word"))out=out+ "\n"+answer+"\n"; - out=out+ "\n"+answer+"\n"; - + //only questions with emty answers + if(answer.contains("EmtyAnswer"))out=out+ 
"\n"+answer+"\n"; +*/ + //only questions with Error in Properties + if(answer.contains("Error in getting Properties"))out=out+ "\n"+answer+"\n"; + + + } System.out.println(question); System.out.println(out); @@ -416,13 +291,227 @@ outfile.write(tmp+"\n"+question+" :\n"+out); outfile.close(); } - - // string=string.replaceAll("?", ""); - //create_Sparql_query_old(string); - - // } + + /** + * Is the function for the Case, you are in Iteration one and have only one triple of condition (s,p,o). + * @param querylist + * @param query + * @return a list with answers from the Server + * @throws SQLException + * @throws JWNLException + */ + private ArrayList<String> simpleIteration1Case(ArrayList<String> querylist, String query) throws SQLException, + JWNLException { + //asking server + String answer; + ArrayList<String> final_answer=new ArrayList<String>(); + + /* + * First try the original query on the server. If that doesnt work, try it with Iteration + */ + answer=sendServerQuestionRequest(query); + if(answer.contains("EmtyAnswer")){ + + String resource=""; + String property_to_compare_with=""; + String sideOfProperty="LEFT"; + + + int tmpcounter=0; + for(String s : querylist){ + //we dont need the first one, because thats the query itself + tmpcounter=tmpcounter+1; + if(tmpcounter>=1){ + if(s.contains("LEFT")){ + sideOfProperty="LEFT"; + resource=s.replace("LEFT",""); + } + if(s.contains("RIGHT")){ + sideOfProperty="RIGHT"; + resource=s.replace("RIGHT",""); + } + if(s.contains("PROPERTY")){ + property_to_compare_with=s.replace("PROPERTY",""); + } + + } + } + System.out.println("Property to compare:: "+ property_to_compare_with); + System.out.println("Resource: "+ resource); + //contains uri AND string, every second is the string + HashMap<String,String> properties = new HashMap<String, String>(); + GetRessourcePropertys property = new GetRessourcePropertys(); + Boolean goOnAfterProperty = true; + try { + /* + * TODO: Have to check now, if we need a right Property or a 
left one + */ + properties=property.getPropertys(getUriFromIndex(resource.toLowerCase(),0),sideOfProperty); + if (properties==null){ + //final_answer.add("Error in getting Properties\n"); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + } + //System.out.println(properties); + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + + final_answer.add("Begin:\n"+query +"\nError in getting Properties \n End"); + goOnAfterProperty=false; + + } + if(goOnAfterProperty==true){ + //property_to_compare_with mit der Liste der propertys vergleichen, und wenn der normalisierte Wert >= LvenstheinMin ist, einbauen und neue query erzeugen. + ArrayList<String> new_queries= new ArrayList<String>(); + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + double tmp=Levenshtein.computeLevenshteinDistance(property_to_compare_with.toLowerCase(), key); + + /* + * TODO: Implement Normalised levensthein + */ + if(tmp<=3.0){ + //alte property uri mit neuer ersetzen: + String query_tmp=query; + String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + //query_tmp=query_tmp.replace(test,properties.get(i-1)); + query_tmp=query_tmp.replace(test,value); + new_queries.add(query_tmp); + } + + } + + System.out.println("Start Iterating Wordnet with "+property_to_compare_with+" and deept of "+explorationdepthwordnet); + ArrayList<String> semantics=new ArrayList<String>(); + ArrayList<String> tmp_semantics=new ArrayList<String>(); + ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); + semantics.add(property_to_compare_with); + + //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child + String _temp_=myindex.getWordnetHelp(property_to_compare_with); + if(_temp_==null){ + tmp_semantics=semantics; + } + else{ + semantics.clear(); + semantics.add(_temp_); + tmp_semantics=semantics; + } + Boolean goOnAfterWordnet = true; + for(int i=0;i<=explorationdepthwordnet;i++){ + + try { + tmp_semantics=getSemantics(tmp_semantics); + if (tmp_semantics==null){ + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + else{ + //each word only one time + for(String k : tmp_semantics){ + if(!semantics.contains(k)) semantics.add(k); + } + } + + } catch (IOException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + goOnAfterWordnet=false; + final_answer.add("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); + + } + + } + + if(goOnAfterWordnet==true){ + + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + + for(String b : semantics){ + if(key.contains(b.toLowerCase())){ + if(!result_SemanticsMatchProperties.contains(key)){ + //create new query + result_SemanticsMatchProperties.add(key); + String query_tmp=query; + String test = getUriFromIndex(property_to_compare_with.toLowerCase(),1); + query_tmp=query_tmp.replace(test,value); + System.out.println("New query after wordnet: "+ query_tmp); + new_queries.add(query_tmp); + } + } + } + } + + for(String bla : new_queries){ + String answer_tmp; + answer_tmp=sendServerQuestionRequest(bla); + System.out.println("Antwort vom Server: "+answer_tmp); + final_answer.add("Begin:\n"+bla +"\n"+answer_tmp+" \n End"); + } + } + } + } + + return final_answer; + } + + + /** + * Iterates thru the conditions and returns an array, where one can see, if the Property is left or right from the resource + * @param query + * @return returns an array, where one can see, if the Property is left or right from the resource + 
*/ + private static ArrayList<String> createLeftAndRightPropertyArray(String query){ + query=query.replace(" ", " "); + Pattern p = Pattern.compile (".*\\{(.*\\<http.*)\\}.*"); + Matcher m = p.matcher (query); + ArrayList<String> lstquery = new ArrayList<String>(); + while (m.find()) { + String tmp= m.group(1); + tmp=tmp.replace("http://dbpedia.org/resource/","").replace("http://dbpedia.org/property/", "").replace("http://dbpedia.org/ontology/", ""); + + //split on . for sign for end of conditions + String[] firstArray=tmp.split("\\."); + for(String i : firstArray){ + + String[] secondArray=i.split(" "); + //always in three counts + int counter=0; + for(String j : secondArray){ + counter=counter+1; + //only one condition + if(secondArray.length%3==0){ + if(counter==1&&j.contains("<")){ + //position of Property is right + lstquery.add("RIGHT"+j.replace("<", "").replace(">","")); + } + else if(counter==3&&j.contains("<")){ + //position of Property is left + lstquery.add("RIGHT"+j.replace("<", "").replace(">","")); + } + else if(counter==2){ + lstquery.add("PROPERTY"+j.replace("<", "").replace(">","")); + } + + else if(j.contains("?")) lstquery.add("VARIABLE"); + } + if(counter==0)counter=0; + + + } + } + } + + return lstquery; + } /** * Method gets a String and takes the information from the templator to creat a Sparql query. 
@@ -435,11 +524,6 @@ Set<BasicQueryTemplate> querytemps = btemplator.buildBasicQueries(question); for (BasicQueryTemplate temp : querytemps) { - /*System.out.println("temp.getQt();" + temp.getQt()); - System.out.println("temp.getSelTerms();" + temp.getSelTerms()); - System.out.println("temp.getVariablesAsStringList();" + temp.getVariablesAsStringList()); - System.out.println("temp.getConditions();" + temp.getConditions()); - System.out.println("temp.getSlots();" + temp.getSlots());*/ ArrayList<String> lstquerynew = new ArrayList<String>(); ArrayList<String> lstquerupsidedown = new ArrayList<String>(); String query; @@ -454,7 +538,7 @@ for(SPARQL_Filter tmp : temp.getFilters()) filters=filters+tmp+" "; //System.out.println("\n"); System.out.println("\n"); - query="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE { "+ conditions.replace("--","") + "}"+filters; + query="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions.replace("--","") + filters+"}"; String conditions_new = ""; for(Path condition: temp.getConditions()){ @@ -478,37 +562,46 @@ System.out.println("Conditions_new: " + conditions_new); - String query_upside_down = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE { "+ conditions_new.replace("--","") + "}"+filters; + String query_upside_down = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions_new.replace("--","") +filters+ "}"; String[] slots= null; + int slotcounter=1; for(Slot slot : temp.getSlots()){ - String tmp= slot.toString(); - tmp= tmp.replace("UNSPEC",""); - tmp= tmp.replace("RESOURCE",""); - tmp= tmp.replace("{",""); - tmp= tmp.replace("}",""); - tmp=tmp.replace(" ",""); + //see below + slotcounter=slotcounter+1; + + + String 
slotstring=slot.toString().replace("UNSPEC","").replace("RESOURCE","").replace("{","").replace("}",""); + slotstring=slotstring.replace(" ",""); //System.out.println(tmp); //damit auch wirklich nur ?y und nicht ?y0 ersetzt wird, einfach nach "?y " suchen. - String[] array = tmp.split(":"); + String[] array = slotstring.split(":"); String replace; if(array[0].length()<2)replace = "?"+array[0]+" "; else replace="?"+array[0]; - //System.out.println("replace: " + replace); - //hier dann den hm wert von array[1] eintragen + - - //String hm_result=hm.get(array[1].toLowerCase()); - String hm_result=getUriFromIndex(array[1].toLowerCase(),0); + //TODO: Hotfix: get rid of " PROPERTY " + String _ThingGettingURIfor_=array[1]; + _ThingGettingURIfor_=_ThingGettingURIfor_.replace(" PROPERTY ","").toLowerCase(); + String hm_result=getUriFromIndex(_ThingGettingURIfor_,0); try { if(hm_result.contains("Category:")) hm_result=hm_result.replace("Category:",""); } catch ( Exception e ) { - //System.out.println( "Das war keine Zahl!" ); + } + /*always the middle slot is the property + * so count and always take the second of third to become a property + */ + if(slotcounter%2==0){ + hm_result=getUriFromIndex(_ThingGettingURIfor_,1); + } + //set back to 0 to start new + if(slotcounter==3) slotcounter=0; query=query.replace(replace, "<"+hm_result+">"); query_upside_down=query_upside_down.replace(replace, "<"+hm_result+">"); @@ -516,23 +609,27 @@ lstquerupsidedown.add(query_upside_down); lstquerynew.add(query); - //slots hinzufügen - for(Slot slot : temp.getSlots()){ - String tmp= slot.toString(); - tmp= tmp.replace("UNSPEC",""); - tmp= tmp.replace("RESOURCE",""); - tmp= tmp.replace("{",""); - tmp= tmp.replace("}",""); - tmp=tmp.replace(" ",""); - lstquerupsidedown.add(tmp); - lstquerynew.add(tmp); + + + ArrayList<String> lsttmp=createLeftAndRightPropertyArray(query); + //if its lower than three, we dont have any conditions and dont need to check it. 
+ //also if the size%3 isnt 0, than something else is wrong and we dont need to test the query + if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerynew.add(i); + else{ + lstquerynew.clear(); + lstquerynew.add("ERROR"); } - //System.out.println("Query: "+query); - /*lstquery.add(query); - lstquery.add(query_upside_down);*/ + + lsttmp.clear(); + lsttmp=createLeftAndRightPropertyArray(query_upside_down); + if(lsttmp.size()>=3&&lsttmp.size()%3==0)for(String i : lsttmp) lstquerupsidedown.add(i); + else{ + lstquerupsidedown.clear(); + lstquerupsidedown.add("ERROR"); + } + lstquery.add(lstquerynew); lstquery.add(lstquerupsidedown); - } return lstquery; @@ -540,6 +637,41 @@ + private void saveNotParsedQuestions(String question) throws IOException{ + BufferedReader in = null; + + String tmp=""; + // Lies Textzeilen aus der Datei in einen Vector: + try { + in = new BufferedReader( + new InputStreamReader( + new FileInputStream( "/tmp/notParsedQuestions" ) ) ); + String s; + while( null != (s = in.readLine()) ) { + tmp=tmp+"\n"+s; + } + } catch( FileNotFoundException ex ) { + } catch( Exception ex ) { + System.out.println( ex ); + } finally { + if( in != null ) + try { + in.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + String out=""; + BufferedWriter outfile = new BufferedWriter( + new OutputStreamWriter( + new FileOutputStream( "/tmp/notParsedQuestions" ) ) ); + + outfile.write(tmp+"\n"+question); + outfile.close(); + } + /** * * @param string @@ -549,27 +681,31 @@ */ private String getUriFromIndex(String string, int fall) throws SQLException{ String result=null; + //just to be sure its only 0 or 1 + if(fall!=0 && fall!=1) fall=0; if(fall==0){ - //result=hm.get(string.toLowerCase()); - //if(result==null)result=myindex.getURI(string); result=myindex.getResourceURI(string.toLowerCase()); if(result==null)result=myindex.getPropertyURI(string.toLowerCase()); } if(fall==1){ - 
/*result=hm.get(string.toLowerCase()); - if(result==null)result=myindex.getURI(string); - if(result==null)result="http://dbpedia.org/property/"+string.toLowerCase();*/ - //should be alway property an not resource - //result=result.replace("resource", "property"); result=myindex.getPropertyURI(string.toLowerCase()); if(result==null){ result=myindex.getResourceURI(string.toLowerCase()); - result=result.replace("resource", "property"); + if(result!=null) result=result.replace("resource", "property"); } } - - if(result==null) return "http://dbpedia.org/property/"+string.toLowerCase(); + String tmp=""; + tmp=string.toLowerCase(); + tmp=tmp.replace("property",""); + tmp=tmp.replace(" ", "_"); + if(result==null) { + if(fall==1)return "http://dbpedia.org/property/"+tmp; + if(fall==0)return "http://dbpedia.org/resource/"+tmp; + else{ + return result; + } + } else return result; } @@ -578,19 +714,98 @@ private static ArrayList<String> getSemantics (ArrayList<String> semantics) throws IOException, JWNLException { ArrayList<String> result = new ArrayList<String>(); + //result.clear(); + //try{ try{ for(String id :semantics){ - List<String> array =wordnet.getRelatedNouns(id); - for(String i:array){ - if(!result.contains(i))result.add(i); + List<String> array_relatedNouns=null; + List<String> array_bestsynonyms=null; + List<String> array_siterterms=null; + //array.clear(); + System.out.println("Wordnet Word: "+id); + array_relatedNouns =wordnet.getRelatedNouns(id); + + array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, id); + + array_siterterms=wordnet.getSisterTerms(POS.NOUN, id); + + if(array_relatedNouns!=null){ + for(String i:array_relatedNouns){ + if(!result.contains(i))result.add(i); + } } + if(array_bestsynonyms!=null){ + for(String i:array_bestsynonyms){ + if(!result.contains(i))result.add(i); + } + } + if(array_siterterms!=null){ + for(String i:array_siterterms){ + if(!result.contains(i))result.add(i); + } + } + } - return result; - } catch (Exception e) { - return 
null; } + catch(Exception e){ + if(result.isEmpty()) return null; + } + + if(!result.isEmpty()) return result; + else{ + //System.out.println("Didnt find ") + /*this is the case, if the first time nothing was found. + * but sometimes wordnet doesnt find anything e.g. die place... bzt you have also die and place + * so we try to find the seperate words and test them as well + */ + try{ + for(String id :semantics){ + String[] tmp_array=id.split(" "); + if(tmp_array.length>=2){ + for(String tmp : tmp_array){ + List<String> array_relatedNouns=null; + List<String> array_bestsynonyms=null; + List<String> array_siterterms=null; + //array.clear(); + //System.out.println("Wordnet Word: "+tmp); + array_relatedNouns =wordnet.getRelatedNouns(tmp); + + array_bestsynonyms=wordnet.getBestSynonyms(POS.NOUN, tmp); + + array_siterterms=wordnet.getSisterTerms(POS.NOUN, tmp); + + if(array_relatedNouns!=null){ + for(String i:array_relatedNouns){ + if(!result.contains(i))result.add(i); + } + } + if(array_bestsynonyms!=null){ + for(String i:array_bestsynonyms){ + if(!result.contains(i))result.add(i); + } + } + if(array_siterterms!=null){ + for(String i:array_siterterms){ + if(!result.contains(i))result.add(i); + } + } + + } + } + + } + } + catch(Exception e){ + if(result.isEmpty()) return null; + } + + } + + if(!result.isEmpty()) return result; + else return null; + // else{ return result;} } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java 2011-12-07 16:15:38 UTC (rev 3485) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/mySQLDictionary.java 2011-12-07 16:20:39 UTC (rev 3486) @@ -21,56 +21,13 @@ conn = DriverManager.getConnection("jdbc:sqlite::memory:"); createIndexPropertys(); createIndexResource(); + 
createWordnetHelp(); //optional!! //createIndexWikipedia(); } -private String createSimpleHashKey(String string){ - string = string.replace("!",""); - string = string.replace(":",""); - string = string.replace("/",""); - string = string.replace("\\",""); - string = string.replace("?",""); - string = string.replace(":",""); - - string = string.replace("a","1"); - string = string.replace("b","2"); - string = string.replace("c","3"); - string = string.replace("d","4"); - string = string.replace("e","5"); - string = string.replace("f","6"); - string = string.replace("g","7"); - string = string.replace("h","8"); - string = string.replace("i","9"); - string = string.replace("j","10"); - string = string.replace("k","11"); - string = string.replace("l","12"); - string = string.replace("m","13"); - string = string.replace("n","14"); - string = string.replace("o","15"); - string = string.replace("p","16"); - string = string.replace("q","17"); - string = string.replace("r","18"); - string = string.replace("s","19"); - string = string.replace("t","20"); - string = string.replace("u","21"); - string = string.replace("v","22"); - string = string.replace("w","23"); - string = string.replace("x","24"); - string = string.replace("y","25"); - string = string.replace("z","26"); - string = string.replace("ä","0"); - string = string.replace("ö","0"); - string = string.replace("ü","0"); - string = string.replace("?","0"); - string = string.replace(" ","0"); - return string; - - -} - public String getResourceURI(String string) throws SQLException{ Statement stat = conn.createStatement(); ResultSet rs; @@ -115,7 +72,81 @@ } + public String getWordnetHelp(String string) throws SQLException{ + Statement stat = conn.createStatement(); + ResultSet rs; + try { + rs = stat.executeQuery("select singular from wordnet where plural='"+string.toLowerCase()+"';"); + return rs.getString("singular"); + } catch (Exception e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + return null; 
+ } + + } + + private void createWordnetHelp() throws SQLException{ /*System.out.println("Start SQL test"); + Class.forName( "org.sqlite.JDBC" ); + conn = DriverManager.getConnection("jdbc:sqlite::memory:");*/ + System.out.println("start generating Wordnet Help-Function"); + Statement stat = conn.createStatement(); + stat.executeUpdate("drop table if exists wordnet;"); + stat.executeUpdate("create table wordnet (plural, singular);"); + PreparedStatement prep = conn.prepareStatement("insert into wordnet values (?, ?);"); + BufferedReader in=null; + // conn.setAutoCommit(false); + int zaehler=0; + try { + in = new BufferedReader( + new InputStreamReader( + new FileInputStream( "/home/swalter/workspace/noun.exc" ) ) ); + String s; + while( null != (s = in.readLine()) ) { + String[] tmp_array =s.split(" "); + if(tmp_array.length>=2){ + prep.setString(1, tmp_array[0]); + prep.setString(2, tmp_array[1]); + String temp=""; + if(tmp_array.length>2){ + for(int i =1;i<tmp_array.length;i++){ + temp=temp+tmp_array[i]+" "; + } + prep.setString(2, temp); + } + prep.addBatch(); + zaehler=zaehler+1; + //if(zaehler%10000==0) System.out.println(zaehler); + if(zaehler%10000==0){ + conn.setAutoCommit(false); + prep.executeBatch(); + conn.setAutoCommit(false); + System.out.println("done"); + } + + } + } + } catch( FileNotFoundException ex ) { + } catch( Exception ex ) { + System.out.println( ex ); + } finally { + if( in != null ) + try { + in.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + conn.setAutoCommit(false); + prep.executeBatch(); + conn.setAutoCommit(true); + System.out.println("Done"); + + } + private void createIndexWikipedia() throws ClassNotFoundException, SQLException{ /*System.out.println("Start SQL test"); Class.forName( "org.sqlite.JDBC" ); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2011-12-07 16:15:44
|
Revision: 3485 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3485&view=rev Author: sebastianwtr Date: 2011-12-07 16:15:38 +0000 (Wed, 07 Dec 2011) Log Message: ----------- [tbsl] removed System.out Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/WordNet.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/WordNet.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/WordNet.java 2011-12-07 15:58:10 UTC (rev 3484) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/nlp/WordNet.java 2011-12-07 16:15:38 UTC (rev 3485) @@ -71,9 +71,9 @@ IndexWord iw = dict.getIndexWord(pos, s);//dict.getMorphologicalProcessor().lookupBaseForm(pos, s) // IndexWord iw = dict.getMorphologicalProcessor().lookupBaseForm(pos, s); if(iw != null){ - Synset[] synsets = iw.getSenses();System.out.println(synsets[0]); + Synset[] synsets = iw.getSenses(); + //System.out.println(synsets[0]); PointerTarget[] pointerArr = synsets[0].getTargets(); - System.out.println(pointerArr); } } catch (JWNLException e) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-07 15:58:17
|
Revision: 3484 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3484&view=rev Author: lorenz_b Date: 2011-12-07 15:58:10 +0000 (Wed, 07 Dec 2011) Log Message: ----------- Set release packaging to zip and tar.gz. Modified Paths: -------------- trunk/interfaces/src/main/assemble/archive.xml Modified: trunk/interfaces/src/main/assemble/archive.xml =================================================================== --- trunk/interfaces/src/main/assemble/archive.xml 2011-12-07 15:24:10 UTC (rev 3483) +++ trunk/interfaces/src/main/assemble/archive.xml 2011-12-07 15:58:10 UTC (rev 3484) @@ -2,6 +2,7 @@ <id>archive</id> <formats> <format>tar.gz</format> + <format>zip</format> </formats> <fileSets> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-07 15:24:24
|
Revision: 3483 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3483&view=rev Author: lorenz_b Date: 2011-12-07 15:24:10 +0000 (Wed, 07 Dec 2011) Log Message: ----------- Some changes for release. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java trunk/interfaces/pom.xml trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -226,7 +226,7 @@ l.setReasoner(reasoner); ConfigHelper.configure(l, "maxExecutionTimeInSeconds", 10); - l.setClassToDescribe(new NamedClass("http://dbpedia.org/ontology/AdministrativeRegion")); + l.setClassToDescribe(new NamedClass("http://dbpedia.org/ontology/Olympics")); l.init(); l.start(); Modified: 
trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -19,7 +19,9 @@ package org.dllearner.algorithms.properties; +import java.net.URL; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -36,6 +38,7 @@ import org.dllearner.core.owl.EquivalentObjectPropertiesAxiom; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.AxiomScore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -132,5 +135,15 @@ return axioms; } + public static void main(String[] args) throws Exception{ + EquivalentObjectPropertyAxiomLearner l = new EquivalentObjectPropertyAxiomLearner(new SparqlEndpointKS(new SparqlEndpoint( + new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())));//.getEndpointDBpediaLiveAKSW())); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/country")); + l.setMaxExecutionTimeInSeconds(10); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(5, 0.75)); + } + } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ 
trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -19,6 +19,8 @@ package org.dllearner.algorithms.properties; +import java.net.MalformedURLException; +import java.net.URL; import java.util.ArrayList; import org.dllearner.core.AbstractAxiomLearningAlgorithm; @@ -101,10 +103,14 @@ logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } - public static void main(String[] args) { - IrreflexiveObjectPropertyAxiomLearner l = new IrreflexiveObjectPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/thumbnail")); + public static void main(String[] args) throws Exception { + SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"))); + + IrreflexiveObjectPropertyAxiomLearner l = new IrreflexiveObjectPropertyAxiomLearner(ks); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/author")); l.start(); + + System.out.println(l.getCurrentlyBestEvaluatedAxioms(10, 0.75)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -176,21 +176,21 @@ } public static void main(String[] args) throws Exception{ - SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://factforge.net/sparql")));//.getEndpointDBpediaLiveAKSW())); + SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new 
URL("http://dbpedia.aksw.org:8902/sparql")));//.getEndpointDBpediaLiveAKSW())); SPARQLReasoner reasoner = new SPARQLReasoner(ks); -// reasoner.prepareSubsumptionHierarchy(); + reasoner.prepareSubsumptionHierarchy(); ObjectPropertyDomainAxiomLearner l = new ObjectPropertyDomainAxiomLearner(ks); l.setReasoner(reasoner); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/industry")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/officialLanguage")); l.setMaxExecutionTimeInSeconds(10); - l.setReturnOnlyNewAxioms(true); +// l.setReturnOnlyNewAxioms(true); l.init(); l.start(); - System.out.println(l.getCurrentlyBestEvaluatedAxioms(5)); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(10, 0.75)); } } Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -28,6 +28,7 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.aksw.commons.jena.CollectionResultSet; import org.dllearner.core.config.BooleanEditor; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.IntegerEditor; @@ -190,6 +191,13 @@ queryExecution.setDefaultGraphURIs(ks.getEndpoint().getDefaultGraphURIs()); queryExecution.setNamedGraphURIs(ks.getEndpoint().getNamedGraphURIs()); +// ResultSet resultSet = null; +// try { +// resultSet = queryExecution.execSelect(); +// } catch (Exception e) { +// logger.error("Got a timeout during query execution.", e); +// resultSet = new CollectionResultSet(Collections.<String>emptyList(), Collections.<QuerySolution>emptyList()); +// } ResultSet resultSet = queryExecution.execSelect(); return resultSet; Modified: 
trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -80,15 +80,19 @@ StringBuilder sb = new StringBuilder(); sb.append("CONSTRUCT {\n"); sb.append("<").append(resource).append("> ").append("?p0 ").append("?o0").append(".\n"); + sb.append("?p0 a ?type0.\n"); for(int i = 1; i < depth; i++){ sb.append("?o").append(i-1).append(" ").append("?p").append(i).append(" ").append("?o").append(i).append(".\n"); + sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); } sb.append("}\n"); sb.append("WHERE {\n"); sb.append("<").append(resource).append("> ").append("?p0 ").append("?o0").append(".\n"); + sb.append("?p0 a ?type0.\n"); for(int i = 1; i < depth; i++){ sb.append("OPTIONAL{\n"); sb.append("?o").append(i-1).append(" ").append("?p").append(i).append(" ").append("?o").append(i).append(".\n"); + sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); } for(int i = 1; i < depth; i++){ sb.append("}"); @@ -96,7 +100,6 @@ sb.append("}\n"); sb.append("LIMIT ").append(limit).append("\n"); sb.append("OFFSET ").append(offset); - return sb.toString(); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -514,7 +514,11 @@ // policy: 
returned sets are clones, i.e. can be modified // (of course we only have to clone the leafs of a class description tree) if (description instanceof NamedClass) { - return (TreeSet<Individual>) classInstancesPos.get((NamedClass) description).clone(); + if(classInstancesPos.containsKey((NamedClass) description)){ + return (TreeSet<Individual>) classInstancesPos.get((NamedClass) description).clone(); + } else { + return new TreeSet<Individual>(); + } } else if (description instanceof Negation) { if(description.getChild(0) instanceof NamedClass) { return (TreeSet<Individual>) classInstancesNeg.get((NamedClass) description.getChild(0)).clone(); Modified: trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -351,7 +351,7 @@ // we do not need the temporary set anymore and let the // garbage collector take care of it valueFrequency = null; - dataValueFrequency = null; + dataValueFrequency.clear();// = null; // System.out.println("freqDataValues: " + frequentDataValues); Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/pom.xml 2011-12-07 15:24:10 UTC (rev 3483) @@ -15,7 +15,7 @@ </parent> <properties> - <release.name>1.0-alpha-2</release.name> + <release.name>1.0-beta-1</release.name> </properties> <profiles> @@ -202,12 +202,10 @@ <groupId>org.dllearner</groupId> <artifactId>components-core</artifactId> </dependency> - <!-- - <dependency> + <!--dependency> <groupId>org.dllearner</groupId> <artifactId>components-ext</artifactId> - </dependency> - --> + </dependency--> <!-- Added the dependency 
of the core tests so that they will be accessible from the tests in this component --> <dependency> @@ -279,6 +277,12 @@ <groupId>org.json</groupId> <artifactId>json</artifactId> </dependency> + + <dependency> + <groupId>commons-lang</groupId> + <artifactId>commons-lang</artifactId> + <version>2.3</version> +</dependency> <!--BEGIN Logging Dependencies--> Modified: trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -123,7 +123,7 @@ AbstractCELA la = context.getBean(AbstractCELA.class); new CrossValidation(la,lp,rs,nrOfFolds,false); } else { - knowledgeSource = context.getBean(KnowledgeSource.class); + knowledgeSource = context.getBeansOfType(KnowledgeSource.class).entrySet().iterator().next().getValue(); algorithm = context.getBean(LearningAlgorithm.class); algorithm.start(); } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -128,6 +128,7 @@ import org.semanticweb.owlapi.io.SystemOutDocumentTarget; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLIndividual; @@ -223,6 +224,7 @@ AbstractReasonerComponent rcCached; private Set<OWLAxiom> learnedOWLAxioms; + private Set<EvaluatedAxiom> learnedEvaluatedAxioms; public Enrichment(SparqlEndpoint se, Entity resource, 
double threshold, int nrOfAxiomsToLearn, boolean useInference, boolean verbose) { this.se = se; @@ -263,6 +265,7 @@ algorithmRuns = new LinkedList<AlgorithmRun>(); learnedOWLAxioms = new HashSet<OWLAxiom>(); + learnedEvaluatedAxioms = new HashSet<EvaluatedAxiom>(); } public void start() throws ComponentInitException, IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, LearningProblemUnsupportedException, MalformedURLException { @@ -444,7 +447,7 @@ learnedAxioms.add(new EvaluatedAxiom(axiom, score)); } System.out.println(prettyPrint(learnedAxioms)); - + learnedEvaluatedAxioms.addAll(learnedAxioms); algorithmRuns.add(new AlgorithmRun(CELOE.class, learnedAxioms, ConfigHelper.getConfigOptionValues(la))); return learnedAxioms; } @@ -485,7 +488,7 @@ List<EvaluatedAxiom> learnedAxioms = learner .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn, threshold); System.out.println(prettyPrint(learnedAxioms)); - + learnedEvaluatedAxioms.addAll(learnedAxioms); for(EvaluatedAxiom evAx : learnedAxioms){ learnedOWLAxioms.add(OWLAPIAxiomConvertVisitor.convertAxiom(evAx.getAxiom())); } @@ -665,7 +668,7 @@ return model; } - private OWLOntology getGeneratedOntology(){ + public OWLOntology getGeneratedOntology(){ OWLOntology ontology = null; try { OWLOntologyManager man = OWLManager.createOWLOntologyManager(); @@ -677,6 +680,31 @@ return ontology; } + public OWLOntology getGeneratedOntology(boolean withConfidenceAsAnnotations){ + OWLOntology ontology = null; + try { + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = man.getOWLDataFactory(); + if(withConfidenceAsAnnotations){ + OWLAnnotationProperty confAnnoProp = factory.getOWLAnnotationProperty(IRI.create(EnrichmentVocabulary.NS + "confidence")); + Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); + for(EvaluatedAxiom evAx : learnedEvaluatedAxioms){ + OWLAxiom ax = 
OWLAPIAxiomConvertVisitor.convertAxiom(evAx.getAxiom()); + ax = ax.getAnnotatedAxiom(Collections.singleton( + factory.getOWLAnnotation(confAnnoProp, factory.getOWLLiteral(evAx.getScore().getAccuracy())))); + axioms.add(ax); + } + ontology = man.createOntology(axioms); + } else { + ontology = man.createOntology(learnedOWLAxioms); + } + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + return ontology; + } + /* * Write axioms in Turtle syntax. */ @@ -748,6 +776,8 @@ "Specifies whether to use inference. If yes, the schema will be loaded into a reasoner and used for computing the scores.").withOptionalArg().ofType(Boolean.class).defaultsTo(true); parser.acceptsAll(asList("s", "serialize"), "Specify a file where the ontology with all axioms can be written.") .withRequiredArg().ofType(File.class); + parser.acceptsAll(asList("a", "annotations"), + "Specifies whether to save scores as annotations.").withOptionalArg().ofType(Boolean.class).defaultsTo(true); // parse options and display a message for the user in case of problems OptionSet options = null; try { @@ -885,7 +915,7 @@ if(options.has("s")){ File file = (File)options.valueOf("s"); try { - OWLOntology ontology = e.getGeneratedOntology(); + OWLOntology ontology = e.getGeneratedOntology(options.has("a")); OutputStream os = new BufferedOutputStream(new FileOutputStream(file)); OWLManager.createOWLOntologyManager().saveOntology(ontology, new RDFXMLOntologyFormat(), os); } catch (OWLOntologyStorageException e1) { Modified: trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -177,7 +177,7 @@ public static void getAllConfs(File f, String path, Map<String, ArrayList<String>> 
confs) { path = path + File.separator; // System.out.println(path); - String[] act = f.list(); + String[] act = f.list();System.out.println(f); for (int i = 0; i < act.length; i++) { // System.out.println(act[i]); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |