From: <ku...@us...> - 2012-05-03 14:32:07
Revision: 3682 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3682&view=rev Author: kurzum Date: 2012-05-03 14:31:56 +0000 (Thu, 03 May 2012) Log Message: ----------- missing examples in kb do not throw exception any more Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLP.java trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLP.java 2012-05-03 11:42:24 UTC (rev 3681) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLP.java 2012-05-03 14:31:56 UTC (rev 3682) @@ -24,6 +24,7 @@ import java.util.Set; import java.util.TreeSet; +import org.apache.log4j.Logger; import org.dllearner.core.AbstractLearningProblem; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; @@ -41,7 +42,8 @@ * */ public abstract class PosNegLP extends AbstractLearningProblem { - + private static Logger logger = Logger.getLogger(PosNegLP.class); + protected Set<Individual> positiveExamples = new TreeSet<Individual>(); protected Set<Individual> negativeExamples = new TreeSet<Individual>(); protected Set<Individual> allExamples = new TreeSet<Individual>(); @@ -114,10 +116,18 @@ allExamples = Helper.union(positiveExamples, negativeExamples); if(!reasoner.getIndividuals().containsAll(allExamples)) { - String str = "The examples below are not contained in the knowledge base (check spelling and prefixes)\n"; - Set<Individual> inds = Helper.difference(allExamples, reasoner.getIndividuals()); - str += inds.toString(); - throw new ComponentInitException(str); + Set<Individual> missing = Helper.difference(allExamples, reasoner.getIndividuals()); + double percentage = (double) (missing.size()/allExamples.size()); + percentage = Math.round(percentage * 1000) / 1000 ; + String str = "The examples ("+percentage+" % of total) below are not contained in the knowledge base (check spelling and prefixes)\n"; + str += missing.toString(); + if(missing.size()==allExamples.size()) { + throw new ComponentInitException(str); + } if(percentage < 0.10) { + logger.warn(str); + } else { + logger.error(str); + } } } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java 2012-05-03 11:42:24 UTC (rev 3681) +++ trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java 2012-05-03 14:31:56 UTC (rev 3682) @@ -33,7 +33,8 @@ Set<String> individuals = new HashSet<String>(); Set<Triple> triples = model.getGraph().find(Triple.ANY).toSet(); - ExtendedIterator<OntClass> itClass = model.listClasses(); + + ExtendedIterator<OntClass> itClass = model.listNamedClasses(); while (itClass.hasNext()) { classes.add(itClass.next().getURI()); } @@ -58,7 +59,7 @@ String sUri; String pUri; String oUri; - System.out.println(individuals); + //System.out.println(individuals); // foreach triple in the model for (Triple triple : triples) { @@ -72,16 +73,16 @@ // if subject is an Individual if (individuals.contains(sUri)) { - log.debug("Subject is an individuals {}",triple); + log.trace("Subject is an individual {}",triple); // if predicate is rdf:type if 
(pUri.equals(RDF.type.getURI())) { // if object is not in the list of class and not equals // owl:thing - if (!classes.contains(model.getResource(oUri)) + if (!classes.contains(oUri) && !oUri.equals(OWL.Thing.getURI())) { - model.getResource(oUri).addProperty(RDFS.subClassOf, OWL.Thing); + model.getResource(oUri).addProperty(RDF.type,OWL.Class); classes.add(oUri); changes++; log.debug("{} is a class",oUri); @@ -133,9 +134,16 @@ } // if subject is an owl:class } else if (classes.contains(sUri)) { - model.getResource(oUri).addProperty( - com.hp.hpl.jena.vocabulary.RDFS.subClassOf, OWL.Thing); - changes++; + log.trace("Subject is a class {}",triple); + + //TODO check this assumption + //if s is owl:class, then o is owl:class too ???? + if(!classes.contains(oUri) ){ + model.getResource(oUri).addProperty(RDF.type, OWL.Class); + classes.add(oUri); + log.debug("{} is a class",oUri); + changes++; + } } } return changes; @@ -156,11 +164,8 @@ } } // model.write(System.out); - int changes; - do{ - changes=this.addTypes(model); - } while(changes!=0); - + while(this.addTypes(model)!=0); + return model; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
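A side note on the percentage computation in the PosNegLP change above: missing.size()/allExamples.size() divides two ints before the cast to double, so the reported share comes out as 0.0 unless every example is missing. A minimal, self-contained sketch of the intended ratio (plain Java; the class name and example IRIs are made up for illustration):

    import java.util.Set;
    import java.util.TreeSet;

    // Sketch only: share of examples missing from the knowledge base,
    // rounded to three decimals. Casting one operand to double *before*
    // dividing avoids the integer truncation.
    public class MissingExampleRatio {

        static double ratio(Set<String> missing, Set<String> all) {
            double fraction = (double) missing.size() / all.size();
            return Math.round(fraction * 1000.0) / 1000.0;
        }

        public static void main(String[] args) {
            Set<String> all = new TreeSet<String>();
            all.add("ex:a"); all.add("ex:b"); all.add("ex:c"); all.add("ex:d");
            Set<String> missing = new TreeSet<String>();
            missing.add("ex:a");
            System.out.println(ratio(missing, all)); // prints 0.25
        }
    }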
From: <dc...@us...> - 2012-05-03 13:14:58
Revision: 3680
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3680&view=rev
Author: dcherix
Date: 2012-05-03 11:41:19 +0000 (Thu, 03 May 2012)

Log Message:
-----------
Bug fixes in type ontology

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java

Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java	2012-05-02 20:44:08 UTC (rev 3679)
+++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java	2012-05-03 11:41:19 UTC (rev 3680)
@@ -103,7 +103,7 @@
         for (String candidate : candidates) {
             if (!alreadyQueriedIndividuals.contains(candidate)) {
-                System.out.println(candidate);
+//                System.out.println(candidate);
                 result.add(candidate);
             }
         }
@@ -128,12 +128,9 @@
         }
         if (ontologySchemaUrls == null) {
             throw new ComponentInitException(
-                    "An ontology schema description file (ontologyFile) in RDF ist required");
+                    "An ontology schema description file (ontologyFile) in RDF is required");
         }
-        for(String instance:instances){
-            model.createIndividual(instance, OWL.Thing);
-        }
         Monitor monComp = MonitorFactory.start("Simple SPARQL Component")
                 .start();
         Monitor monIndexer = MonitorFactory.start("Schema Indexer").start();
@@ -160,7 +157,7 @@
             log.info("processing (recursion " + i + ") "
                     + instancesSet.size() + " new instances");
             queryString = aGenerator.createQuery(instancesSet, aboxfilter);
-            System.out.println(queryString);
+//            System.out.println(queryString);
             log.debug("SPARQL: {}", queryString);
             monQueryingABox = MonitorFactory.start("ABox query time");
@@ -172,7 +169,7 @@
             monQueryingABox.stop();
             typizeModel=MonitorFactory.start("Typize the model");
-            typeOntology.addTypes(model);
+            model=typeOntology.addTypetoJena(model, instances, null);
             typizeModel.stop();
             alreadyQueried.addAll(instancesSet);
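This revision removes the extractor's explicit seeding loop (model.createIndividual(instance, OWL.Thing)) and instead hands the instance list to TypeOntology.addTypetoJena, which marks each URI as an owl:Thing individual before running the typing passes. A small Jena sketch of that marking step, assuming the Jena 2.x API (the com.hp.hpl.jena packages used throughout this code) is on the classpath:

    import java.util.Arrays;
    import java.util.List;

    import com.hp.hpl.jena.ontology.OntModel;
    import com.hp.hpl.jena.rdf.model.ModelFactory;
    import com.hp.hpl.jena.vocabulary.OWL;
    import com.hp.hpl.jena.vocabulary.RDF;

    // Sketch: declare a list of instance URIs as owl:Thing individuals in an
    // OntModel, essentially what addTypetoJena does with its individuals
    // argument before the typing fixpoint runs.
    public class SeedIndividuals {
        public static void main(String[] args) {
            OntModel model = ModelFactory.createOntologyModel();
            List<String> instances = Arrays.asList(
                    "http://dbpedia.org/resource/Plato",
                    "http://dbpedia.org/resource/Socrates");
            for (String instance : instances) {
                model.getResource(instance).addProperty(RDF.type, OWL.Thing);
            }
            System.out.println(model.listIndividuals().toSet());
        }
    }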
From: <dc...@us...> - 2012-05-03 11:42:45
Revision: 3681 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3681&view=rev Author: dcherix Date: 2012-05-03 11:42:24 +0000 (Thu, 03 May 2012) Log Message: ----------- Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java 2012-05-03 11:41:19 UTC (rev 3680) +++ trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java 2012-05-03 11:42:24 UTC (rev 3681) @@ -1,13 +1,14 @@ package org.dllearner.utilities.analyse; +import java.util.ArrayList; import java.util.HashSet; +import java.util.List; import java.util.Set; import org.dllearner.kb.sparql.simple.QueryExecutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.hp.hpl.jena.graph.Node; import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.ontology.DatatypeProperty; import com.hp.hpl.jena.ontology.Individual; @@ -18,75 +19,151 @@ import com.hp.hpl.jena.util.iterator.ExtendedIterator; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.RDF; +import com.hp.hpl.jena.vocabulary.RDFS; public class TypeOntology { private static Logger log = LoggerFactory.getLogger(TypeOntology.class); - public void addTypes(OntModel model) { - Set<DatatypeProperty> dataProperties = model.listDatatypeProperties() - .toSet(); - Set<ObjectProperty> objectProperties = model.listObjectProperties() - .toSet(); - Set<OntClass> classes = model.listClasses().toSet(); - Set<Individual> individuals = model.listIndividuals().toSet(); + private int addTypes(OntModel model) { + int changes=0; + Set<String> dataProperties = new HashSet<String>(); + Set<String> objectProperties = new HashSet<String>(); + Set<String> classes = new HashSet<String>(); + Set<String> individuals = new HashSet<String>(); Set<Triple> triples = model.getGraph().find(Triple.ANY).toSet(); - Node subject; - Node predicate; - Node object; - // while (!triples.isEmpty()) { - ExtendedIterator<Triple> iterator = model.getGraph().find(Triple.ANY); - // System.out.println(triples.size()); + + ExtendedIterator<OntClass> itClass = model.listClasses(); + while (itClass.hasNext()) { + classes.add(itClass.next().getURI()); + } + + ExtendedIterator<Individual> itIndividuals = model.listIndividuals(); + while (itIndividuals.hasNext()) { + individuals.add(itIndividuals.next().getURI()); + } + + ExtendedIterator<DatatypeProperty> itDataProperties = model + .listDatatypeProperties(); + while (itDataProperties.hasNext()) { + dataProperties.add(itDataProperties.next().getURI()); + } + + ExtendedIterator<ObjectProperty> itObjectProperties = model + .listObjectProperties(); + while (itObjectProperties.hasNext()) { + objectProperties.add(itObjectProperties.next().getURI()); + } + + String sUri; + String pUri; + String oUri; + System.out.println(individuals); + + // foreach triple in the model for (Triple triple : triples) { - // System.out.println(triple); - subject = triple.getSubject(); - predicate = triple.getPredicate(); - object = triple.getObject(); - if (individuals.contains(model.getResource(subject.getURI()))) { - log.debug("{}", triple); - if (predicate.hasURI(RDF.type.getURI())) { - if (!classes.contains(model.getResource(object.getURI())) - && !object.getURI().equals(OWL.Thing.getURI())) { - 
model.getResource(subject.getURI()).addProperty( - com.hp.hpl.jena.vocabulary.RDFS.subClassOf, - OWL.Thing); - classes = model.listClasses().toSet(); - log.debug("{} is a class", object); + if(triple.getSubject().isBlank() || triple.getPredicate().isBlank() || triple.getObject().isBlank()){ + System.out.println(triple); + continue; + } + sUri = triple.getSubject().getURI(); + pUri = triple.getPredicate().getURI(); + oUri = triple.getObject().getURI(); + + // if subject is an Individual + if (individuals.contains(sUri)) { + log.debug("Subject is an individuals {}",triple); + + // if predicate is rdf:type + if (pUri.equals(RDF.type.getURI())) { + + // if object is not in the list of class and not equals + // owl:thing + if (!classes.contains(model.getResource(oUri)) + && !oUri.equals(OWL.Thing.getURI())) { + model.getResource(oUri).addProperty(RDFS.subClassOf, OWL.Thing); + classes.add(oUri); + changes++; + log.debug("{} is a class",oUri); } - } else if (object.isLiteral()) { - if (!objectProperties.contains(model.getResource(predicate - .getURI()))) { - model.createDatatypeProperty(predicate.getURI()); - dataProperties = model.listDatatypeProperties().toSet(); - log.debug("{} is a dataproperty", predicate); + + // object is not a class, so it can only be a literal or an + // object + // if object is a literal + } else if (model.getResource(oUri).isLiteral()) { + + // if predicate is not in the list of objectproperties + if (!objectProperties.contains(pUri)) { + model.createDatatypeProperty(pUri); + dataProperties.add(pUri); + + log.debug("{} is a dataproperty",pUri); + + // if predicate is in the list of objectproperties it + // must be an rdf:property } else { - model.createOntProperty(predicate.getURI()); - log.info("{} is a rdf:property", predicate); + model.createOntProperty(pUri); + log.info("{} is a rdf:property", pUri); } - } else if (!individuals.contains(model.getResource(object - .getURI()))) { - model.getResource(object.getURI()).addProperty(RDF.type, - OWL.Thing); - individuals = model.listIndividuals().toSet(); - if (!dataProperties.contains(model.getResource(predicate - .getURI()))) { - model.createObjectProperty(predicate.getURI()); - objectProperties = model.listObjectProperties().toSet(); - log.debug("{} is an objectproperty", predicate); + changes++; + + // object is not a literal or a class so it must be an + // instance + // if object is not in the list of individuals + } else if (!individuals.contains(oUri)) { + model.getResource(oUri).addProperty(RDF.type, OWL.Thing); + individuals.add(oUri); + + // subject and object are individuals so is predicate an + // objectproperty + // if predicate ist not in the list of dataproperties + if (!dataProperties.contains(pUri)) { + model.createObjectProperty(pUri); + objectProperties.add(pUri); + log.debug("{} is an objectproperty", pUri); + + // if predicate is in the list of dataproperties it must + // be a rdf:property } else { - model.createOntProperty(predicate.getURI()); - log.info("{} is a rdf:property", predicate); + model.createOntProperty(pUri); + log.info("{} is a rdf:property", pUri); } - log.debug("{} is an individual", object); + log.debug("{} is an individual",oUri); + changes++; } - - } else if (classes.contains(model.getResource(subject.getURI()))) { - model.getResource(object.getURI()).addProperty( + // if subject is an owl:class + } else if (classes.contains(sUri)) { + model.getResource(oUri).addProperty( com.hp.hpl.jena.vocabulary.RDFS.subClassOf, OWL.Thing); + changes++; } } + return changes; } + public OntModel 
addTypetoJena(OntModel model, List<String> individuals, + List<String> classes) { + if (individuals != null) { + for (String individual : individuals) { + model.getResource(individual).addProperty(RDF.type, OWL.Thing); + } + } + if (classes != null) { + for (String ontClass : classes) { + if (!ontClass.equals(OWL.Thing.getURI())) { + model.getResource(ontClass).addProperty(RDFS.subClassOf, OWL.Thing); + } + } + } +// model.write(System.out); + int changes; + do{ + changes=this.addTypes(model); + } while(changes!=0); + + return model; + } + public static void main(String... args) { String sparql = "CONSTRUCT {?s ?p ?o}" + "{ ?s ?p ?o " @@ -101,26 +178,21 @@ + " ! regex(str(?o), '^http://dbpedia.org/resource/Category') &&" + " ! regex(str(?o), '^http://dbpedia.org/resource/Template') ) . }"; OntModel model = ModelFactory.createOntologyModel(); - model.createIndividual("http://dbpedia.org/resource/Philolaus", - OWL.Thing); - model.createIndividual("http://dbpedia.org/resource/Zeno_of_Elea", - OWL.Thing); - model.createIndividual("http://dbpedia.org/resource/Socrates", - OWL.Thing); - model.createIndividual("http://dbpedia.org/resource/Pytagoras", - OWL.Thing); - model.createIndividual("http://dbpedia.org/resource/Archytas", - OWL.Thing); - model.createIndividual("http://dbpedia.org/resource/Plato", OWL.Thing); - model.createIndividual("http://dbpedia.org/resource/Democritus", - OWL.Thing); + List<String> individuals = new ArrayList<String>(7); + individuals.add("http://dbpedia.org/resource/Philolaus"); + individuals.add("http://dbpedia.org/resource/Zeno_of_Elea"); + individuals.add("http://dbpedia.org/resource/Socrates"); + individuals.add("http://dbpedia.org/resource/Pytagoras"); + individuals.add("http://dbpedia.org/resource/Archytas"); + individuals.add("http://dbpedia.org/resource/Plato"); + individuals.add("http://dbpedia.org/resource/Democritus"); QueryExecutor exec = new QueryExecutor(); exec.executeQuery(sparql, "http://live.dbpedia.org/sparql", model, "http://dbpedia.org"); System.out.println(model.listIndividuals().toSet()); System.out.println(model.listObjectProperties().toSet()); TypeOntology type = new TypeOntology(); - type.addTypes(model); + model=type.addTypetoJena(model, individuals, null); System.out.println(model.listIndividuals().toSet()); System.out.println(model.listObjectProperties().toSet()); System.out.println(model.listDatatypeProperties().toSet()); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
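The rewritten addTypes now returns the number of statements it added, and addTypetoJena simply reruns it until a pass makes no further changes. The control pattern, stripped of the Jena specifics (the Pass interface and the toy counter are stand-ins for illustration):

    // Generic fixed-point driver: keep applying a typing pass until it
    // reports zero changes, mirroring addTypetoJena's
    // do { changes = addTypes(model); } while (changes != 0);
    public class Fixpoint {

        interface Pass<T> {
            int apply(T target); // returns the number of changes made
        }

        static <T> T untilStable(T target, Pass<T> pass) {
            while (pass.apply(target) != 0) {
                // each pass may enable further typing decisions in the next one
            }
            return target;
        }

        public static void main(String[] args) {
            // Toy stand-in for the model: a counter that is halved per pass.
            int[] counter = { 8 };
            untilStable(counter, new Pass<int[]>() {
                public int apply(int[] c) {
                    int before = c[0];
                    c[0] /= 2;
                    return before - c[0];
                }
            });
            System.out.println(counter[0]); // prints 0 once no pass changes anything
        }
    }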
From: <lor...@us...> - 2012-05-02 20:44:14
Revision: 3679
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3679&view=rev
Author: lorenz_b
Date: 2012-05-02 20:44:08 +0000 (Wed, 02 May 2012)

Log Message:
-----------
Small modification to avoid loading YAGO schema.

Modified Paths:
--------------
    trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java

Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java
===================================================================
--- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java	2012-05-02 20:44:00 UTC (rev 3678)
+++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java	2012-05-02 20:44:08 UTC (rev 3679)
@@ -34,7 +34,6 @@
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.StringWriter;
 import java.lang.reflect.InvocationTargetException;
 import java.net.MalformedURLException;
 import java.net.SocketTimeoutException;
@@ -138,6 +137,9 @@
 import org.dllearner.utilities.owl.OWLAPIConverter;
 import org.ini4j.IniPreferences;
 import org.ini4j.InvalidFileFormatException;
+import org.semanticweb.HermiT.Configuration;
+import org.semanticweb.HermiT.Reasoner;
+import org.semanticweb.HermiT.examples.HermiTConfigurations;
 import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.io.RDFXMLOntologyFormat;
 import org.semanticweb.owlapi.model.AxiomType;
@@ -1156,7 +1158,9 @@
 			}
 		}
 		System.out.println("Preparing reasoner ...");
-//		reasoner = new Reasoner(dbPediaOntology);
+//		Configuration conf = new Configuration();
+//		conf.ignoreUnsupportedDatatypes = true;
+//		reasoner = new Reasoner(conf, dbPediaOntology);
 		reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(dbPediaOntology);
 		reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
 	}
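The commented-out lines above show the HermiT alternative that was prepared here: a Configuration with ignoreUnsupportedDatatypes enabled, passed to the Reasoner constructor, while Pellet remains the active reasoner. A sketch of how the HermiT variant would be wired up through the OWL API (the ontology file path is just an illustrative assumption):

    import java.io.File;

    import org.semanticweb.HermiT.Configuration;
    import org.semanticweb.HermiT.Reasoner;
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.reasoner.InferenceType;

    // Sketch: classify an ontology with HermiT while ignoring datatypes it
    // does not support (the option left in the comments above).
    public class HermiTClassify {
        public static void main(String[] args) throws Exception {
            OWLOntology ontology = OWLManager.createOWLOntologyManager()
                    .loadOntologyFromOntologyDocument(new File("evaluation/currentDBpediaSchema.owl"));
            Configuration conf = new Configuration();
            conf.ignoreUnsupportedDatatypes = true;
            Reasoner reasoner = new Reasoner(conf, ontology);
            reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
            System.out.println("Consistent: " + reasoner.isConsistent());
        }
    }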
From: <lor...@us...> - 2012-05-02 20:44:07
Revision: 3678
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3678&view=rev
Author: lorenz_b
Date: 2012-05-02 20:44:00 +0000 (Wed, 02 May 2012)

Log Message:
-----------
Small modification to avoid loading YAGO schema.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java

Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java	2012-05-02 15:22:07 UTC (rev 3677)
+++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java	2012-05-02 20:44:00 UTC (rev 3678)
@@ -199,7 +199,7 @@
 		Model model = ModelFactory.createDefaultModel();
 		//load class hierarchy
-		String query = "CONSTRUCT {?s <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o} WHERE {?s <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o}";
+		String query = "CONSTRUCT {?s <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o} WHERE {?s <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o.FILTER(!REGEX(STR(?s), 'http://dbpedia.org/class/yago/'))}";
 		model.add(loadIncrementally(query));
 		query = "CONSTRUCT {?s <http://www.w3.org/2002/07/owl#equivalentClass> ?o} WHERE {?s <http://www.w3.org/2002/07/owl#equivalentClass> ?o}";
 		model.add(loadIncrementally(query));
@@ -277,6 +277,8 @@
 //			System.exit(0);
 		f = new QueryExecutionFactoryPaginated(f, 1000);
 		Model model = f.createQueryExecution(query).execConstruct();
+		System.out.println(query);
+		System.out.println("Got " + model.size() + " triple.");
 		return model;
 //		} catch (ClassNotFoundException e) {
 //			e.printStackTrace();
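The added FILTER keeps the class hierarchy query from pulling in the large http://dbpedia.org/class/yago/ namespace. A sketch of running such a filtered CONSTRUCT against an endpoint with plain Jena ARQ (endpoint URL as used elsewhere in these commits; the pagination via QueryExecutionFactoryPaginated is omitted here):

    import com.hp.hpl.jena.query.QueryExecution;
    import com.hp.hpl.jena.query.QueryExecutionFactory;
    import com.hp.hpl.jena.rdf.model.Model;

    // Sketch: fetch rdfs:subClassOf links, skipping YAGO classes on the
    // subject side, as in the query string added above.
    public class FilteredHierarchy {
        public static void main(String[] args) {
            String query = "CONSTRUCT {?s <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o} "
                    + "WHERE {?s <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o. "
                    + "FILTER(!REGEX(STR(?s), 'http://dbpedia.org/class/yago/'))}";
            QueryExecution qe = QueryExecutionFactory.sparqlService("http://live.dbpedia.org/sparql", query);
            Model model = qe.execConstruct();
            System.out.println("Got " + model.size() + " triples.");
            qe.close();
        }
    }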
From: <lor...@us...> - 2012-05-02 15:25:24
Revision: 3675 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3675&view=rev Author: lorenz_b Date: 2012-05-02 14:52:45 +0000 (Wed, 02 May 2012) Log Message: ----------- Refactored measure for disjointness. Added popularity precomputation for classes. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-02 09:26:50 UTC (rev 3674) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-02 14:52:45 UTC (rev 3675) @@ -21,6 +21,7 @@ import java.net.URL; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -44,6 +45,7 @@ import org.dllearner.core.owl.NamedClass; import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.AxiomScore; import org.dllearner.learningproblems.Heuristics; import org.slf4j.Logger; @@ -84,6 +86,8 @@ private Set<NamedClass> allClasses; + private int popularity; + public DisjointClassesLearner(SparqlEndpointKS ks){ this.ks = ks; } @@ -119,9 +123,13 @@ fetchedRows = 0; currentlyBestEvaluatedDescriptions = new ArrayList<EvaluatedDescription>(); - //TODO + //we return here if the class contains no instances + popularity = reasoner.getPopularity(classToDescribe); + if(popularity == 0){ + return; + } - //at first get all existing classes in knowledgebase + //at first get all existing classes in knowledge base allClasses = getAllClasses(); allClasses.remove(classToDescribe); @@ -316,17 +324,24 @@ //firstly, create disjoint classexpressions which not occur and give score of 1 for(NamedClass cls : completeDisjointclasses){ if(useClassPopularity){ - int popularity = 0; + int overlap = 0; + int pop; if(ks.isRemote()){ - popularity = reasoner.getIndividualsCount(cls); + pop = reasoner.getPopularity(cls); } else { - popularity = ((LocalModelBasedSparqlEndpointKS)ks).getModel().getOntClass(cls.getName()).listInstances().toSet().size(); + pop = ((LocalModelBasedSparqlEndpointKS)ks).getModel().getOntClass(cls.getName()).listInstances().toSet().size(); } //we skip classes with no instances - if(popularity == 0) continue; - double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(popularity, 0); - double accuracy = (confidenceInterval[0] + confidenceInterval[1]) / 2; - evalDesc = new EvaluatedDescription(cls, new AxiomScore(1- accuracy)); + if(pop == 0) continue; + + //we compute the estimated precision + double precision = accuracy(pop, overlap); + //we compute the estimated recall + double recall = accuracy(popularity, overlap); + //compute the overall score + double score = 1 - fMEasure(precision, recall); + + evalDesc = new EvaluatedDescription(cls, new AxiomScore(score)); } else { evalDesc = new EvaluatedDescription(cls, new AxiomScore(1)); } @@ -335,23 +350,51 @@ } //secondly, create disjoint classexpressions with score 1 - (#occurence/#all) - for(Entry<NamedClass, Integer> entry : sortByValues(class2Count)){ - //drop classes from 
OWL and RDF namespace - if(entry.getKey().getName().startsWith(OWL2.getURI()) || entry.getKey().getName().startsWith(RDF.getURI()))continue; -// evalDesc = new EvaluatedDescription(entry.getKey(), -// new AxiomScore(1 - (entry.getValue() / (double)all))); - double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(total, entry.getValue()); - double accuracy = (confidenceInterval[0] + confidenceInterval[1]) / 2; - evalDesc = new EvaluatedDescription(entry.getKey(), - new AxiomScore(1 - accuracy)); - evalDescs.add(evalDesc); + for (Entry<NamedClass, Integer> entry : sortByValues(class2Count)) { + NamedClass cls = entry.getKey(); + // drop classes from OWL and RDF namespace + if (cls.getName().startsWith(OWL2.getURI()) || cls.getName().startsWith(RDF.getURI())) + continue; + if (useClassPopularity) { + int overlap = entry.getValue(); + int pop; + if (ks.isRemote()) { + pop = reasoner.getPopularity(cls); + } else { + pop = ((LocalModelBasedSparqlEndpointKS) ks).getModel() + .getOntClass(cls.getName()).listInstances().toSet() + .size(); + } + // we skip classes with no instances + if (pop == 0) + continue; + + // we compute the estimated precision + double precision = accuracy(pop, overlap); + // we compute the estimated recall + double recall = accuracy(popularity, overlap); + // compute the overall score + double score = 1 - fMEasure(precision, recall); + + evalDesc = new EvaluatedDescription(cls, new AxiomScore(score)); + } else { + evalDesc = new EvaluatedDescription(cls, new AxiomScore(1)); + } } class2Count.put(classToDescribe, total); return evalDescs; } + private double accuracy(int total, int success){ + double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(total, success); + return (confidenceInterval[0] + confidenceInterval[1]) / 2; + } + private double fMEasure(double precision, double recall){ + return 2 * precision * recall / (precision + recall); + } + private void keepMostGeneralClasses(Set<NamedClass> classes){ if(ks.isRemote()){ if(reasoner.isPrepared()){ @@ -384,12 +427,14 @@ } public static void main(String[] args) throws Exception{ -// SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())); - SparqlEndpointKS ks = new LocalModelBasedSparqlEndpointKS(new URL("http://dl-learner.svn.sourceforge.net/viewvc/dl-learner/trunk/examples/swore/swore.rdf?revision=2217")); + SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())); + ks = new LocalModelBasedSparqlEndpointKS(new URL("http://dl-learner.svn.sourceforge.net/viewvc/dl-learner/trunk/examples/swore/swore.rdf?revision=2217")); + ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW()); DisjointClassesLearner l = new DisjointClassesLearner(ks); - l.setClassToDescribe(new NamedClass("http://ns.softwiki.de/req/CustomerRequirement")); + l.setClassToDescribe(new NamedClass("http://dbpedia.org/ontology/Book")); l.init(); -// l.getReasoner().prepareSubsumptionHierarchy(); + l.getReasoner().prepareSubsumptionHierarchy(); + l.getReasoner().precomputeClassPopularity(); // System.out.println(l.getReasoner().getClassHierarchy().getSubClasses(new NamedClass("http://dbpedia.org/ontology/Athlete"), false));System.exit(0); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java 
=================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java 2012-05-02 09:26:50 UTC (rev 3674) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java 2012-05-02 14:52:45 UTC (rev 3675) @@ -702,6 +702,10 @@ public Set<NamedClass> getAllClasses() { Set<NamedClass> classes = new TreeSet<NamedClass>(); String query = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?c WHERE {?c a owl:Class} LIMIT 1000"; + /* + * String query = "PREFIX owl: <http://www.w3.org/2002/07/owl#> PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#> " + + "SELECT ?c WHERE {{?c a owl:Class} UNION {?c rdfs:subClassOf ?d} UNION {?d rdfs:subClassOf ?c}} LIMIT 1000"; + */ SparqlQuery sq = new SparqlQuery(query, sparqlEndpoint); ResultSet q = sq.send(false); while (q.hasNext()) { Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-02 09:26:50 UTC (rev 3674) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-02 14:52:45 UTC (rev 3675) @@ -93,7 +93,9 @@ private ClassHierarchy hierarchy; private OntModel model; + private Map<NamedClass, Integer> classPopularityMap; + public SPARQLReasoner(SparqlEndpointKS ks) { this.ks = ks; @@ -111,6 +113,36 @@ this.model = model; } + public void precomputeClassPopularity(){ + logger.info("Precomputing class popularity ..."); + classPopularityMap = new HashMap<NamedClass, Integer>(); + + Set<NamedClass> classes = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); + String queryTemplate = "SELECT (COUNT(*) AS ?cnt) WHERE {?s a <%s>}"; + + ResultSet rs; + for(NamedClass nc : classes){ + rs = executeSelectQuery(String.format(queryTemplate, nc.getName())); + int cnt = rs.next().getLiteral("cnt").getInt(); + classPopularityMap.put(nc, cnt); + } + } + + public int getPopularity(NamedClass nc){ + if(classPopularityMap.containsKey(nc)){ + return classPopularityMap.get(nc); + } else { + System.out.println("Cache miss: " + nc); + String queryTemplate = "SELECT (COUNT(*) AS ?cnt) WHERE {?s a <%s>}"; + + ResultSet rs = executeSelectQuery(String.format(queryTemplate, nc.getName())); + int cnt = rs.next().getLiteral("cnt").getInt(); + classPopularityMap.put(nc, cnt); + return cnt; + } + + } + public final ClassHierarchy prepareSubsumptionHierarchy() { logger.info("Preparing subsumption hierarchy ..."); long startTime = System.currentTimeMillis(); @@ -915,6 +947,10 @@ this.cache = cache; } + public void setUseCache(boolean useCache) { + this.useCache = useCache; + } + private boolean executeAskQuery(String query){ boolean ret; if(ks.isRemote()){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
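The new scoring in DisjointClassesLearner combines the candidate's popularity and the popularity of the class under analysis into an F-measure and takes one minus it, so classes that share many instances with the described class receive a low disjointness score. A plain-Java sketch of that arithmetic (a standard 95% Wald interval is used here as a stand-in for Heuristics.getConfidenceInterval95Wald, whose exact formula may differ):

    // Sketch of the r3675 disjointness score: overlap is the number of
    // instances shared with the class to describe, pop the candidate's
    // instance count, popularity the described class's instance count.
    public class DisjointnessScore {

        static double[] wald95(int total, int success) {
            double p = (double) success / total;
            double delta = 1.96 * Math.sqrt(p * (1 - p) / total);
            return new double[] { Math.max(0, p - delta), Math.min(1, p + delta) };
        }

        static double accuracy(int total, int success) {
            double[] ci = wald95(total, success);
            return (ci[0] + ci[1]) / 2;
        }

        static double fMeasure(double precision, double recall) {
            return 2 * precision * recall / (precision + recall);
        }

        public static void main(String[] args) {
            int popularity = 1000; // instances of the class to describe
            int pop = 800;         // instances of the candidate class
            int overlap = 10;      // instances they have in common
            double precision = accuracy(pop, overlap);
            double recall = accuracy(popularity, overlap);
            double score = 1 - fMeasure(precision, recall);
            System.out.println("disjointness score = " + score); // close to 1, likely disjoint
        }
    }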
From: <ku...@us...> - 2012-05-02 15:22:17
Revision: 3677
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3677&view=rev
Author: kurzum
Date: 2012-05-02 15:22:07 +0000 (Wed, 02 May 2012)

Log Message:
-----------
Example for the old component

Modified Paths:
--------------
    trunk/examples/thanh/AristotlePosNeg.conf

Added Paths:
-----------
    trunk/examples/thanh/AristotlePosNeg_alteKomponente.conf

Modified: trunk/examples/thanh/AristotlePosNeg.conf
===================================================================
--- trunk/examples/thanh/AristotlePosNeg.conf	2012-05-02 14:53:12 UTC (rev 3676)
+++ trunk/examples/thanh/AristotlePosNeg.conf	2012-05-02 15:22:07 UTC (rev 3677)
@@ -15,7 +15,7 @@
 sparql.defaultGraphURI = "http://dbpedia.org"
 sparql.recursionDepth = 2
 sparql.ontologySchemaUrls = {"http://downloads.dbpedia.org/3.6/dbpedia_3.6.owl" }
-sparql.aboxfilter = "FILTER ( !isLiteral(?o) ) "
+sparql.aboxfilter = "FILTER ( !isLiteral(?o) && regex(str(?o), '^http://dbpedia.org/resource/') && ! regex(str(?o), '^http://dbpedia.org/resource/Category') ) "
 // && regex(str(?o), '^http://dbpedia.org/resource/') && ! regex(str(?o), '^http://dbpedia.org/resource/Category') && ! regex(str(?o), '^http://dbpedia.org/resource/Template') ) . "
 //|| regex(str(?p), '^http://dbpedia.org/property/') )) "
 sparql.tboxfilter = "FILTER ( regex(str(?class), '^http://dbpedia.org/ontology/') ) . "

Added: trunk/examples/thanh/AristotlePosNeg_alteKomponente.conf
===================================================================
--- trunk/examples/thanh/AristotlePosNeg_alteKomponente.conf	                        (rev 0)
+++ trunk/examples/thanh/AristotlePosNeg_alteKomponente.conf	2012-05-02 15:22:07 UTC (rev 3677)
@@ -0,0 +1,63 @@
+/**
+ * Some people from Greece.
+ * Note: DBpedia is always subject to change, solutions will change over time
+
+ * Possible Solution:
+ * Theorist OR (Mathematician AND Physicist)
+ *
+ * This is the same as AristotlePos.conf, but positives and negatives
+ */
+
+
+// SPARQL options
+sparql.type = "SPARQL endpoint fragment"
+sparql.url = "http://live.dbpedia.org/sparql"
+sparql.defaultGraphURIs = {"http://dbpedia.org"}
+sparql.recursionDepth = 2
+//TODOREFACTOR check if predefinedFilter works at all
+//predefined filter (1 = YAGO based learning)
+sparql.predefinedFilter = "YAGO"
+sparql.instances = {
+"http://dbpedia.org/resource/Democritus",
+"http://dbpedia.org/resource/Zeno_of_Elea",
+"http://dbpedia.org/resource/Plato",
+"http://dbpedia.org/resource/Socrates",
+"http://dbpedia.org/resource/Archytas",
+"http://dbpedia.org/resource/Pythagoras",
+"http://dbpedia.org/resource/Philolaus"
+}
+
+reasoner.type = "fast instance checker"
+reasoner.sources = {sparql}
+
+
+// we want to learn from positive and negative examples
+lp.type = "posNegStandard"
+lp.positiveExamples = {
+"http://dbpedia.org/resource/Archytas",
+"http://dbpedia.org/resource/Pythagoras",
+"http://dbpedia.org/resource/Philolaus"
+}
+lp.negativeExamples = {
+"http://dbpedia.org/resource/Democritus",
+"http://dbpedia.org/resource/Zeno_of_Elea",
+"http://dbpedia.org/resource/Plato",
+"http://dbpedia.org/resource/Socrates"
+}
+// plug a reasoner into the learning problem
+lp.reasoner = reasoner
+
+// create a refinement operator and configure it
+op.type = "rho"
+op.useNegation = false
+op.useAllConstructor = false
+op.useCardinalityRestrictions = false
+op.useHasValueConstructor = true
+op.reasoner = reasoner
+
+// we use the ocel algorithm
+alg.type = "ocel"
+alg.reasoner = reasoner
+alg.maxExecutionTimeInSeconds = 30
+alg.noisePercentage = 10.0
+alg.minExecutionTimeInSeconds = 10
From: <lor...@us...> - 2012-05-02 15:00:26
Revision: 3676
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3676&view=rev
Author: lorenz_b
Date: 2012-05-02 14:53:12 +0000 (Wed, 02 May 2012)

Log Message:
-----------
Modified eval script.

Modified Paths:
--------------
    trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java

Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java
===================================================================
--- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java	2012-05-02 14:52:45 UTC (rev 3675)
+++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java	2012-05-02 14:53:12 UTC (rev 3676)
@@ -197,9 +197,9 @@
 	private SparqlEndpoint endpoint;
 
 	// can be used to only evaluate a part of DBpedia
-	private int maxObjectProperties = 1;
-	private int maxDataProperties = 1;
-	private int maxClasses = 1;
+	private int maxObjectProperties = 0;
+	private int maxDataProperties = 0;
+	private int maxClasses = 0;
 	private List<Class<? extends AxiomLearningAlgorithm>> objectPropertyAlgorithms;
 	private List<Class<? extends AxiomLearningAlgorithm>> dataPropertyAlgorithms;
 	private List<Class<? extends LearningAlgorithm>> classAlgorithms;
@@ -328,7 +328,9 @@
 
 		sparqlReasoner = new SPARQLReasoner(ks);
 		sparqlReasoner.setCache(new ExtractionDBCache("cache"));
+		sparqlReasoner.setUseCache(true);
 		sparqlReasoner.prepareSubsumptionHierarchy();
+		sparqlReasoner.precomputeClassPopularity();
 		Thread.sleep(20000);
 
 		if(runClassAlgorithms){
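The evaluation script now calls precomputeClassPopularity() before running the class algorithms. Under the hood (see the SPARQLReasoner change in r3675 above) this issues one COUNT query per class and caches the result. A reduced sketch of that pattern with Jena ARQ against a SPARQL endpoint (endpoint and class URI chosen only for illustration):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import com.hp.hpl.jena.query.QueryExecution;
    import com.hp.hpl.jena.query.QueryExecutionFactory;
    import com.hp.hpl.jena.query.ResultSet;

    // Sketch: per-class instance counts, cached in a map, along the lines of
    // SPARQLReasoner.precomputeClassPopularity / getPopularity.
    public class ClassPopularity {
        public static void main(String[] args) {
            String endpoint = "http://live.dbpedia.org/sparql";
            String queryTemplate = "SELECT (COUNT(*) AS ?cnt) WHERE {?s a <%s>}";
            List<String> classes = Arrays.asList("http://dbpedia.org/ontology/Book");
            Map<String, Integer> popularity = new HashMap<String, Integer>();
            for (String cls : classes) {
                QueryExecution qe = QueryExecutionFactory.sparqlService(
                        endpoint, String.format(queryTemplate, cls));
                ResultSet rs = qe.execSelect();
                popularity.put(cls, rs.next().getLiteral("cnt").getInt());
                qe.close();
            }
            System.out.println(popularity);
        }
    }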
From: <lor...@us...> - 2012-05-02 09:27:00
Revision: 3674 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3674&view=rev Author: lorenz_b Date: 2012-05-02 09:26:50 +0000 (Wed, 02 May 2012) Log Message: ----------- Made script more flexible from command line and added algorithms. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-01 07:53:23 UTC (rev 3673) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-02 09:26:50 UTC (rev 3674) @@ -19,7 +19,10 @@ */ package org.dllearner.scripts.evaluation; +import static java.util.Arrays.asList; + import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; import java.io.BufferedWriter; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -31,9 +34,11 @@ import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; +import java.io.StringWriter; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.SocketTimeoutException; +import java.net.URI; import java.net.URL; import java.sql.Connection; import java.sql.DriverManager; @@ -60,6 +65,10 @@ import java.util.TreeSet; import java.util.prefs.Preferences; +import joptsimple.OptionException; +import joptsimple.OptionParser; +import joptsimple.OptionSet; + import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.compressors.CompressorStreamFactory; @@ -70,14 +79,17 @@ import org.dllearner.algorithms.DisjointClassesLearner; import org.dllearner.algorithms.SimpleSubclassLearner; import org.dllearner.algorithms.celoe.CELOE; +import org.dllearner.algorithms.properties.AsymmetricObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.DataPropertyDomainAxiomLearner; import org.dllearner.algorithms.properties.DataPropertyRangeAxiomLearner; import org.dllearner.algorithms.properties.DisjointDataPropertyAxiomLearner; +import org.dllearner.algorithms.properties.DisjointObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.EquivalentDataPropertyAxiomLearner; import org.dllearner.algorithms.properties.EquivalentObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.FunctionalDataPropertyAxiomLearner; import org.dllearner.algorithms.properties.FunctionalObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.InverseFunctionalObjectPropertyAxiomLearner; +import org.dllearner.algorithms.properties.InverseObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.IrreflexiveObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.ObjectPropertyDomainAxiomLearner; import org.dllearner.algorithms.properties.ObjectPropertyRangeAxiomLearner; @@ -107,6 +119,7 @@ import org.dllearner.core.owl.ObjectProperty; import org.dllearner.core.owl.SubClassAxiom; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.kb.sparql.SparqlKnowledgeSource; @@ -125,7 +138,6 @@ import org.dllearner.utilities.owl.OWLAPIConverter; import 
org.ini4j.IniPreferences; import org.ini4j.InvalidFileFormatException; -import org.semanticweb.HermiT.Reasoner; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; @@ -182,13 +194,12 @@ // only axioms with a score above this threshold will be considered private double threshold = 0.7; - private SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); -// private SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); + private SparqlEndpoint endpoint; // can be used to only evaluate a part of DBpedia - private int maxObjectProperties = 0; - private int maxDataProperties = 0; - private int maxClasses = 0; + private int maxObjectProperties = 1; + private int maxDataProperties = 1; + private int maxClasses = 1; private List<Class<? extends AxiomLearningAlgorithm>> objectPropertyAlgorithms; private List<Class<? extends AxiomLearningAlgorithm>> dataPropertyAlgorithms; private List<Class<? extends LearningAlgorithm>> classAlgorithms; @@ -202,16 +213,19 @@ private OWLOntology dbPediaOntology; private OWLReasoner reasoner; private OWLDataFactory factory = new OWLDataFactoryImpl(); - - public EnrichmentEvaluation() { - + + private SPARQLReasoner sparqlReasoner; + + public EnrichmentEvaluation(SparqlEndpoint endpoint) { + this.endpoint = endpoint; + prefixes = new HashMap<String,String>(); prefixes.put("dbp","http://dbpedia.org/property/"); prefixes.put("dbo","http://dbpedia.org/ontology/"); prefixes.put("yago", "http://dbpedia.org/class/"); objectPropertyAlgorithms = new LinkedList<Class<? extends AxiomLearningAlgorithm>>(); -// objectPropertyAlgorithms.add(DisjointObjectPropertyAxiomLearner.class); + objectPropertyAlgorithms.add(DisjointObjectPropertyAxiomLearner.class); objectPropertyAlgorithms.add(EquivalentObjectPropertyAxiomLearner.class); objectPropertyAlgorithms.add(FunctionalObjectPropertyAxiomLearner.class); objectPropertyAlgorithms.add(InverseFunctionalObjectPropertyAxiomLearner.class); @@ -219,12 +233,14 @@ objectPropertyAlgorithms.add(ObjectPropertyRangeAxiomLearner.class); objectPropertyAlgorithms.add(SubObjectPropertyOfAxiomLearner.class); objectPropertyAlgorithms.add(SymmetricObjectPropertyAxiomLearner.class); + objectPropertyAlgorithms.add(AsymmetricObjectPropertyAxiomLearner.class); objectPropertyAlgorithms.add(TransitiveObjectPropertyAxiomLearner.class); objectPropertyAlgorithms.add(IrreflexiveObjectPropertyAxiomLearner.class); objectPropertyAlgorithms.add(ReflexiveObjectPropertyAxiomLearner.class); + objectPropertyAlgorithms.add(InverseObjectPropertyAxiomLearner.class); dataPropertyAlgorithms = new LinkedList<Class<? 
extends AxiomLearningAlgorithm>>(); -// dataPropertyAlgorithms.add(DisjointDataPropertyAxiomLearner.class); + dataPropertyAlgorithms.add(DisjointDataPropertyAxiomLearner.class); dataPropertyAlgorithms.add(EquivalentDataPropertyAxiomLearner.class); dataPropertyAlgorithms.add(FunctionalDataPropertyAxiomLearner.class); dataPropertyAlgorithms.add(DataPropertyDomainAxiomLearner.class); @@ -301,26 +317,36 @@ } - public void start() throws IllegalArgumentException, SecurityException, InstantiationException, + public void start(boolean runClassAlgorithms, boolean runObjectPropertyAlgorithms, boolean runDataPropertyAlgorithms) throws IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, ComponentInitException, InterruptedException { -// dropAndCreateTable(); long overallStartTime = System.currentTimeMillis(); SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.init(); -// evaluateObjectProperties(ks); + sparqlReasoner = new SPARQLReasoner(ks); + sparqlReasoner.setCache(new ExtractionDBCache("cache")); + sparqlReasoner.prepareSubsumptionHierarchy(); Thread.sleep(20000); + if(runClassAlgorithms){ + evaluateClasses(ks); + } -// evaluateDataProperties(ks); -// -// Thread.sleep(20000); + Thread.sleep(20000); - evaluateClasses(ks); + if(runObjectPropertyAlgorithms){ + evaluateObjectProperties(ks); + } + Thread.sleep(20000); + + if(runDataPropertyAlgorithms){ + evaluateDataProperties(ks); + } + System.out.println("Overall runtime: " + (System.currentTimeMillis()-overallStartTime)/1000 + "s."); } @@ -338,6 +364,7 @@ // dynamically invoke constructor with SPARQL knowledge source AxiomLearningAlgorithm learner = algorithmClass.getConstructor( SparqlEndpointKS.class).newInstance(ks); + ((AbstractAxiomLearningAlgorithm)learner).setReasoner(sparqlReasoner); ConfigHelper.configure(learner, "propertyToDescribe", property.toString()); ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds); @@ -390,7 +417,7 @@ } } objectProperties++; - if (maxObjectProperties != 0 && objectProperties > maxObjectProperties) { + if (maxObjectProperties != 0 && objectProperties == maxObjectProperties) { break; } @@ -410,11 +437,12 @@ Thread.sleep(10000); String algName = ""; for (DatatypeProperty property : properties) { - + Thread.sleep(1000); try{ // dynamically invoke constructor with SPARQL knowledge source AxiomLearningAlgorithm learner = algorithmClass.getConstructor( SparqlEndpointKS.class).newInstance(ks); + ((AbstractAxiomLearningAlgorithm)learner).setReasoner(sparqlReasoner); ConfigHelper.configure(learner, "propertyToDescribe", property.toString()); ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds); @@ -466,7 +494,7 @@ } } dataProperties++; - if (maxDataProperties != 0 && dataProperties > maxDataProperties) { + if (maxDataProperties != 0 && dataProperties == maxDataProperties) { break; } @@ -499,6 +527,7 @@ // dynamically invoke constructor with SPARQL knowledge source LearningAlgorithm learner = algorithmClass.getConstructor( SparqlEndpointKS.class).newInstance(ks); + ((AbstractAxiomLearningAlgorithm)learner).setReasoner(sparqlReasoner); ConfigHelper.configure(learner, "classToDescribe", cls.toString()); ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds); @@ -548,7 +577,7 @@ } classesCnt++; - if (maxClasses != 0 && classesCnt > maxClasses) { + if (maxClasses != 0 && classesCnt == maxClasses) { break; } @@ -689,7 +718,7 @@ 
int numberOfEntitiesWithTimeout = rs.getInt(1); //compute average number of suggestions above threshold - ps = conn.prepareStatement("SELECT AVG(cnt) FROM (SELECT entity, COUNT(axiom) AS cnt FROM (SELECT * FROM evaluation WHERE algorithm=? AND score >=?) AS A GROUP BY entity) AS B"); + ps = conn.prepareStatement("SELECT AVG(cnt) FROM (SELECT entity, COUNT(DISTINCT axiom) AS cnt FROM (SELECT * FROM evaluation WHERE algorithm=? AND score >=?) AS A GROUP BY entity) AS B"); ps.setString(1, algoName); ps.setDouble(2, threshold); rs = ps.executeQuery(); @@ -734,6 +763,7 @@ } table1.append("\\bottomrule\n\\end{tabulary}"); System.out.println(table1.toString()); + write2Disk(table1.toString(), "evaluation/table1.tex"); //second part of evaluation @@ -821,6 +851,7 @@ table2.append("\\end{tabulary}"); System.out.println(table2.toString()); + write2Disk(table2.toString(), "evaluation/table2.tex"); } private void writeToDisk(AxiomType<? extends OWLAxiom> axiomType, Map<String, Double> axiomsWithAccurracy){ @@ -862,6 +893,19 @@ } } + private void write2Disk(String content, String file){ + try { + new File(file).createNewFile(); + BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(file)); + bos.write(content.getBytes()); + bos.close(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + private void writeToDisk(AxiomType<? extends OWLAxiom> axiomType, Set<String> axioms){ String fileName = axiomType.getName().replaceAll(" ", "_") + ".txt"; @@ -920,8 +964,10 @@ axiomType2Algorithm.put(AxiomType.FUNCTIONAL_OBJECT_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{FunctionalObjectPropertyAxiomLearner.class})); axiomType2Algorithm.put(AxiomType.INVERSE_FUNCTIONAL_OBJECT_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{InverseFunctionalObjectPropertyAxiomLearner.class})); axiomType2Algorithm.put(AxiomType.SYMMETRIC_OBJECT_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{SymmetricObjectPropertyAxiomLearner.class})); + axiomType2Algorithm.put(AxiomType.ASYMMETRIC_OBJECT_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{AsymmetricObjectPropertyAxiomLearner.class})); axiomType2Algorithm.put(AxiomType.REFLEXIVE_OBJECT_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{ReflexiveObjectPropertyAxiomLearner.class})); axiomType2Algorithm.put(AxiomType.IRREFLEXIVE_OBJECT_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{IrreflexiveObjectPropertyAxiomLearner.class})); + axiomType2Algorithm.put(AxiomType.INVERSE_OBJECT_PROPERTIES, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{InverseObjectPropertyAxiomLearner.class})); axiomType2Algorithm.put(AxiomType.SUB_DATA_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{SubDataPropertyOfAxiomLearner.class})); axiomType2Algorithm.put(AxiomType.EQUIVALENT_DATA_PROPERTIES, Arrays.asList((Class<? 
extends LearningAlgorithm>[])new Class[]{EquivalentDataPropertyAxiomLearner.class})); @@ -1127,18 +1173,99 @@ } public static void main(String[] args) throws Exception - + { - EnrichmentEvaluation ee = new EnrichmentEvaluation(); - ee.dropAndCreateTable(); - ee.start(); - // ee.printResultsPlain(); - ee.printResultsLaTeX(); - Files.createFile(new File("enrichment_eval.html"), ee.printHTMLTable()); - FileAppender app = new FileAppender(new SimpleLayout(), "log/enrichmentEvalErrors.log"); - Logger.getRootLogger().setLevel(Level.ERROR); - Logger.getRootLogger().removeAllAppenders(); - Logger.getRootLogger().addAppender(app); + OptionParser parser = new OptionParser(); + parser.acceptsAll(asList("h", "?", "help"), "Show help."); + parser.acceptsAll(asList("e", "endpoint"), + "SPARQL endpoint URL to be used.").withRequiredArg() + .ofType(URL.class); + parser.acceptsAll(asList("g", "graph"), + "URI of default graph for queries on SPARQL endpoint.") + .withOptionalArg().ofType(URI.class); + parser.acceptsAll(asList("c", "classes"), + "Run class axiom algorithms") + .withOptionalArg().ofType(Boolean.class).defaultsTo(true); + parser.acceptsAll(asList("o", "objectProperties"), + "Run object property axiom algorithms") + .withOptionalArg().ofType(Boolean.class).defaultsTo(true); + parser.acceptsAll(asList("d", "dataProperties"), + "Run data property axiom algorithms") + .withOptionalArg().ofType(Boolean.class).defaultsTo(true); + parser.acceptsAll(asList("drop"), + "Drop and create tables where data for evaluation is stored.") + .withOptionalArg().ofType(Boolean.class).defaultsTo(false); + + + // parse options and display a message for the user in case of problems + OptionSet options = null; + try { + options = parser.parse(args); + } catch (Exception e) { + System.out.println("Error: " + e.getMessage() + + ". Use -? 
to get help."); + System.exit(0); + } + + // print help screen + if (options.has("?")) { + parser.printHelpOn(System.out); + // main script + } else { + // check that endpoint was specified + if (!options.hasArgument("endpoint")) { + System.out + .println("Please specify a SPARQL endpoint (using the -e option)."); + System.exit(0); + } + + // create SPARQL endpoint object (check that indeed a URL was given) + URL endpoint = null; + try { + endpoint = (URL) options.valueOf("endpoint"); + } catch (OptionException e) { + System.out + .println("The specified endpoint appears not to be a proper URL."); + System.exit(0); + } + URI graph = null; + try { + graph = (URI) options.valueOf("graph"); + } catch (OptionException e) { + System.out + .println("The specified graph appears not to be a proper URL."); + System.exit(0); + } + + LinkedList<String> defaultGraphURIs = new LinkedList<String>(); + if (graph != null) { + defaultGraphURIs.add(graph.toString()); + } + + SparqlEndpoint se = new SparqlEndpoint(endpoint, defaultGraphURIs, + new LinkedList<String>()); + + boolean runClassAlgorithms = (Boolean) options.valueOf("classes"); + boolean runObjectPropertyAlgorithms = (Boolean) options.valueOf("objectProperties"); + boolean runDataPropertyAlgorithms = (Boolean) options.valueOf("dataProperties"); + + boolean dropTables = (Boolean) options.valueOf("drop"); + + EnrichmentEvaluation ee = new EnrichmentEvaluation(se); + if(dropTables){ + ee.dropAndCreateTable(); + } + ee.start(runClassAlgorithms, runObjectPropertyAlgorithms, runDataPropertyAlgorithms); + // ee.printResultsPlain(); + ee.printResultsLaTeX(); + Files.createFile(new File("enrichment_eval.html"), + ee.printHTMLTable()); + FileAppender app = new FileAppender(new SimpleLayout(), + "log/enrichmentEvalErrors.log"); + Logger.getRootLogger().setLevel(Level.ERROR); + Logger.getRootLogger().removeAllAppenders(); + Logger.getRootLogger().addAppender(app); + } } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dc...@us...> - 2012-05-01 07:53:29
Revision: 3673
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3673&view=rev
Author: dcherix
Date: 2012-05-01 07:53:23 +0000 (Tue, 01 May 2012)

Log Message:
-----------
groupId of maven-latex-plugin changed to the new one. Added dependency for xercesImpl.

Modified Paths:
--------------
    trunk/interfaces/pom.xml

Modified: trunk/interfaces/pom.xml
===================================================================
--- trunk/interfaces/pom.xml	2012-04-30 11:59:31 UTC (rev 3672)
+++ trunk/interfaces/pom.xml	2012-05-01 07:53:23 UTC (rev 3673)
@@ -85,9 +85,9 @@
 				</configuration>
 			</plugin>
 			<plugin>
-				<groupId>de.akquinet.maven</groupId>
+				<groupId>de.akquinet.jbosscc.latex</groupId>
 				<artifactId>maven-latex-plugin</artifactId>
-				<version>1.1</version>
+				<version>1.2</version>
 				<configuration>
 					<settings>
 						<!-- all tex main documents in this folder (including subfolders)
@@ -170,9 +170,9 @@
 				</configuration>
 			</plugin>
 			<plugin>
-				<groupId>de.akquinet.maven</groupId>
+				<groupId>de.akquinet.jbosscc.latex</groupId>
 				<artifactId>maven-latex-plugin</artifactId>
-				<version>1.1</version>
+				<version>1.2</version>
 				<configuration>
 					<settings>
 						<!-- all tex main documents in this folder (including subfolders)
@@ -488,6 +488,11 @@
 		</dependency>
 		<!--END Logging Dependencies -->
 
+		<dependency>
+			<groupId>xerces</groupId>
+			<artifactId>xercesImpl</artifactId>
+			<version>2.8.0</version>
+		</dependency>
 	</dependencies>
From: <lor...@us...> - 2012-04-30 11:59:37
Revision: 3672 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3672&view=rev Author: lorenz_b Date: 2012-04-30 11:59:31 +0000 (Mon, 30 Apr 2012) Log Message: ----------- Modified eval script. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-04-30 10:19:21 UTC (rev 3671) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-04-30 11:59:31 UTC (rev 3672) @@ -24,7 +24,9 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; +import java.io.FileInputStream; import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; @@ -125,6 +127,7 @@ import org.ini4j.InvalidFileFormatException; import org.semanticweb.HermiT.Reasoner; import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAxiom; @@ -135,6 +138,7 @@ import org.semanticweb.owlapi.model.OWLObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; import org.semanticweb.owlapi.reasoner.InconsistentOntologyException; import org.semanticweb.owlapi.reasoner.InferenceType; @@ -178,7 +182,7 @@ // only axioms with a score above this threshold will be considered private double threshold = 0.7; - private SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpediaLiveAKSW(); + private SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); // private SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); // can be used to only evaluate a part of DBpedia @@ -229,7 +233,7 @@ classAlgorithms = new LinkedList<Class<? 
extends LearningAlgorithm>>(); // classAlgorithms.add(CELOE.class); -// classAlgorithms.add(DisjointClassesLearner.class); + classAlgorithms.add(DisjointClassesLearner.class); classAlgorithms.add(SimpleSubclassLearner.class); @@ -307,7 +311,7 @@ SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.init(); - evaluateObjectProperties(ks); +// evaluateObjectProperties(ks); Thread.sleep(20000); @@ -315,7 +319,7 @@ // // Thread.sleep(20000); -// evaluateClasses(ks); + evaluateClasses(ks); System.out.println("Overall runtime: " + (System.currentTimeMillis()-overallStartTime)/1000 + "s."); @@ -1079,9 +1083,30 @@ } private void loadCurrentDBpediaOntology2(){ - System.out.println("Loading schema ..."); - SPARQLReasoner r = new SPARQLReasoner(new SparqlEndpointKS(endpoint)); - dbPediaOntology = convert(r.loadSchema()); + dbPediaOntology = null; + try { + dbPediaOntology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(new FileInputStream(new File("evaluation/currentDBpediaSchema.owl"))); + } catch (OWLOntologyCreationException e1) { + e1.printStackTrace(); + } catch (FileNotFoundException e1) { + e1.printStackTrace(); + } + if(dbPediaOntology == null){ + System.out.println("Loading schema ..."); + SPARQLReasoner r = new SPARQLReasoner(new SparqlEndpointKS(endpoint)); + dbPediaOntology = convert(r.loadSchema()); + try { + new File("evaluation").mkdir(); + new File("evaluation/currentDBpediaSchema.owl").createNewFile(); + OWLManager.createOWLOntologyManager().saveOntology(dbPediaOntology, new RDFXMLOntologyFormat(), new FileOutputStream(new File("evaluation/currentDBpediaSchema.owl"))); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } System.out.println("Preparing reasoner ..."); // reasoner = new Reasoner(dbPediaOntology); reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(dbPediaOntology); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
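Editorial note: the interesting part of this revision is that loadCurrentDBpediaOntology2() now caches the DBpedia schema on disk instead of re-fetching it from the endpoint on every run. Below is a condensed sketch of that cache-or-fetch pattern, not the exact code of the revision; it assumes the convert(...) helper and the endpoint field of EnrichmentEvaluation shown in the diff, and uses the cache path introduced there.

    private OWLOntology loadOrCacheDBpediaSchema() throws Exception {
        File cache = new File("evaluation/currentDBpediaSchema.owl");
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        if (cache.exists()) {
            // reuse the locally stored schema, no SPARQL traffic needed
            return man.loadOntologyFromOntologyDocument(cache);
        }
        // fetch the schema once via the endpoint and persist it as RDF/XML for later runs
        SPARQLReasoner r = new SPARQLReasoner(new SparqlEndpointKS(endpoint));
        OWLOntology schema = convert(r.loadSchema());
        cache.getParentFile().mkdirs();
        // save through the manager that actually owns the fetched ontology
        schema.getOWLOntologyManager().saveOntology(schema, new RDFXMLOntologyFormat(), new FileOutputStream(cache));
        return schema;
    }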
From: <lor...@us...> - 2012-04-30 10:19:27
Revision: 3671 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3671&view=rev Author: lorenz_b Date: 2012-04-30 10:19:21 +0000 (Mon, 30 Apr 2012) Log Message: ----------- Added variable to get if no query was successfully executed before timeout, i.e. the whole algorithm got a timeout. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-04-30 10:17:46 UTC (rev 3670) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-04-30 10:19:21 UTC (rev 3671) @@ -89,6 +89,8 @@ protected long startTime; protected int limit = 1000; + private boolean timeout = true; + public AbstractAxiomLearningAlgorithm() { existingAxioms = new TreeSet<Axiom>(new AxiomComparator()); } @@ -154,6 +156,7 @@ if(reasoner == null){ reasoner = new SPARQLReasoner((SparqlEndpointKS) ks); } + timeout = true; } @Override @@ -165,6 +168,10 @@ return getCurrentlyBestAxioms(nrOfAxioms, 0.0); } + public boolean isTimeout() { + return timeout; + } + public List<Axiom> getCurrentlyBestAxioms(int nrOfAxioms, double accuracyThreshold) { List<Axiom> bestAxioms = new ArrayList<Axiom>(); @@ -229,7 +236,9 @@ queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); try { - return queryExecution.execConstruct(); + Model model = queryExecution.execConstruct(); + timeout = false; + return model; } catch (QueryExceptionHTTP e) { if(e.getCause() instanceof SocketTimeoutException){ logger.warn("Got timeout", e); @@ -254,7 +263,9 @@ queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); try { - return queryExecution.execSelect(); + ResultSet rs = queryExecution.execSelect(); + timeout = false; + return rs; } catch (QueryExceptionHTTP e) { if(e.getCause() instanceof SocketTimeoutException){ logger.warn("Got timeout", e); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
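Editorial note: the new flag is set to true in init() and cleared by executeSelectQuery/executeConstructQuery as soon as any query returns, so callers can tell "no query succeeded at all" apart from "partial results before the time limit". A minimal usage sketch, assuming an already initialised SparqlEndpointKS named ks and borrowing a learner setup that appears elsewhere in these diffs (exception handling omitted):

    InverseFunctionalObjectPropertyAxiomLearner l = new InverseFunctionalObjectPropertyAxiomLearner(ks);
    l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/profession"));
    l.setMaxExecutionTimeInSeconds(10);
    l.init();
    l.start();
    if (l.isTimeout()) {
        // not a single query came back before the budget ran out -> back off and retry later
    } else {
        List<Axiom> best = l.getCurrentlyBestAxioms(5);   // partial results are still usable
    }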
From: <lor...@us...> - 2012-04-30 10:17:52
Revision: 3670 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3670&view=rev Author: lorenz_b Date: 2012-04-30 10:17:46 +0000 (Mon, 30 Apr 2012) Log Message: ----------- Modified eval script. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-04-30 09:04:11 UTC (rev 3669) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-04-30 10:17:46 UTC (rev 3670) @@ -84,6 +84,7 @@ import org.dllearner.algorithms.properties.SubObjectPropertyOfAxiomLearner; import org.dllearner.algorithms.properties.SymmetricObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.TransitiveObjectPropertyAxiomLearner; +import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.AnnComponentManager; import org.dllearner.core.AxiomLearningAlgorithm; @@ -306,15 +307,15 @@ SparqlEndpointKS ks = new SparqlEndpointKS(endpoint); ks.init(); -// evaluateObjectProperties(ks); -// -// Thread.sleep(20000); + evaluateObjectProperties(ks); + Thread.sleep(20000); + // evaluateDataProperties(ks); // // Thread.sleep(20000); - evaluateClasses(ks); +// evaluateClasses(ks); System.out.println("Overall runtime: " + (System.currentTimeMillis()-overallStartTime)/1000 + "s."); @@ -344,8 +345,7 @@ int attempt = 0; long startTime = 0; boolean timeout = true; - while(timeout && attempt++ < maxAttempts){ - timeout = false; + while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { System.out.println("Got timeout. 
Waiting " + delayInMilliseconds + " ms ..."); @@ -358,8 +358,8 @@ startTime = System.currentTimeMillis(); try { learner.start(); + timeout = false; } catch (Exception e) { - timeout = true; if(e.getCause() instanceof SocketTimeoutException){ } else { @@ -422,8 +422,7 @@ int attempt = 0; long startTime = 0; boolean timeout = true; - while(timeout && attempt++ < maxAttempts){ - timeout = false; + while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { Thread.sleep(delayInMilliseconds); @@ -435,8 +434,8 @@ startTime = System.currentTimeMillis(); try { learner.start(); + timeout = false; } catch (Exception e) { - timeout = true; if(e.getCause() instanceof SocketTimeoutException){ } else { @@ -506,8 +505,7 @@ int attempt = 0; timeout = true; - while(timeout && attempt++ < maxAttempts){ - timeout = false; + while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { Thread.sleep(delayInMilliseconds); @@ -519,17 +517,12 @@ startTime = System.currentTimeMillis(); try { learner.start(); + timeout = false; } catch (Exception e) { - timeout = true; - if(e.getCause() instanceof SocketTimeoutException){ - - } else { - e.printStackTrace(); - } + e.printStackTrace(); } } - learnedAxioms = ((AxiomLearningAlgorithm)learner) - .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); + learnedAxioms = ((AxiomLearningAlgorithm)learner).getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); } @@ -1090,8 +1083,8 @@ SPARQLReasoner r = new SPARQLReasoner(new SparqlEndpointKS(endpoint)); dbPediaOntology = convert(r.loadSchema()); System.out.println("Preparing reasoner ..."); - reasoner = new Reasoner(dbPediaOntology); -// reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(dbPediaOntology); +// reasoner = new Reasoner(dbPediaOntology); + reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(dbPediaOntology); reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-04-30 09:04:18
Revision: 3669 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3669&view=rev Author: lorenz_b Date: 2012-04-30 09:04:11 +0000 (Mon, 30 Apr 2012) Log Message: ----------- Changed some queries. Set timeout to remaining execution time, so all algorithms should hopefully terminate after the given runtime independently from the endpoint runtime. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -97,7 +97,7 @@ while(newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> - query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -106,7 +106,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?s) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s.}"; + query = "SELECT (COUNT(*) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int symmetric = 0; @@ -128,7 +128,7 @@ } private void runSPARQL1_1_Mode(){ - String query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + String query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -137,7 +137,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?s) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s.}"; + query = "SELECT (COUNT(*) AS ?symmetric) WHERE {?s <%s> ?o. 
?o <%s> ?s.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int symmetric = 0; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -213,8 +213,9 @@ } public static void main(String[] args) throws Exception{ - DisjointObjectPropertyAxiomLearner l = new DisjointObjectPropertyAxiomLearner(new SparqlEndpointKS(new SparqlEndpoint( - new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())));//.getEndpointDBpediaLiveAKSW())); + SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); +// endpoint = new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())); + DisjointObjectPropertyAxiomLearner l = new DisjointObjectPropertyAxiomLearner(new SparqlEndpointKS(endpoint));//.getEndpointDBpediaLiveAKSW())); l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/stateOfOrigin")); l.setMaxExecutionTimeInSeconds(10); l.init(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -116,7 +116,7 @@ offset += limit; - query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); + query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset); newModel = executeConstructQuery(query); } @@ -184,7 +184,7 @@ public static void main(String[] args) throws Exception{ EquivalentDataPropertyAxiomLearner l = new EquivalentDataPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); l.setPropertyToDescribe(new DatatypeProperty("http://dbpedia.org/ontology/birthDate")); - l.setMaxExecutionTimeInSeconds(100); + l.setMaxExecutionTimeInSeconds(10); l.init(); l.start(); System.out.println(l.getCurrentlyBestEvaluatedAxioms(5)); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -110,7 +110,7 @@ all = qs.getLiteral("all").getInt(); } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?s1) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. 
FILTER(?s1 != ?s2) }"; + query = "SELECT (COUNT(DISTINCT ?o) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. FILTER(?s1 != ?s2) }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int notInverseFunctional = 1; @@ -148,7 +148,7 @@ all = qs.getLiteral("all").getInt(); } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?s1) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. FILTER(?s1 != ?s2) }"; + query = "SELECT (COUNT(DISTINCT ?o) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. FILTER(?s1 != ?s2) }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int notInverseFunctional = 1; @@ -168,8 +168,7 @@ } public static void main(String[] args) throws Exception{ - SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint( - new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())); + SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW()); InverseFunctionalObjectPropertyAxiomLearner l = new InverseFunctionalObjectPropertyAxiomLearner(ks); l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/profession")); l.setMaxExecutionTimeInSeconds(10); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -96,7 +96,7 @@ while(newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> - query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -106,7 +106,7 @@ total = qs.getLiteral("total").getInt(); } - query = String.format("SELECT ?p (COUNT(?s) AS ?cnt) WHERE {?s <%s> ?o. ?o ?p ?s.} GROUP BY ?p", propertyToDescribe.getName()); + query = String.format("SELECT ?p (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o. ?o ?p ?s.} GROUP BY ?p", propertyToDescribe.getName()); rs = executeSelectQuery(query); while(rs.hasNext()){ qs = rs.next(); @@ -121,7 +121,7 @@ } private void runSPARQL1_1_Mode(){ - String query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + String query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -131,7 +131,7 @@ total = qs.getLiteral("total").getInt(); } - query = String.format("SELECT ?p (COUNT(?s) AS ?cnt) WHERE {?s <%s> ?o. ?o ?p ?s.} GROUP BY ?p", propertyToDescribe.getName()); + query = String.format("SELECT ?p (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o. 
?o ?p ?s.} GROUP BY ?p", propertyToDescribe.getName()); rs = executeSelectQuery(query); while(rs.hasNext()){ qs = rs.next(); @@ -146,14 +146,10 @@ SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint( new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList()));//.getEndpointDBpediaLiveAKSW())); - SPARQLReasoner reasoner = new SPARQLReasoner(ks); - reasoner.prepareSubsumptionHierarchy(); - InverseObjectPropertyAxiomLearner l = new InverseObjectPropertyAxiomLearner(ks); - l.setReasoner(reasoner); l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/officialLanguage")); - l.setMaxExecutionTimeInSeconds(10); + l.setMaxExecutionTimeInSeconds(100); // l.setReturnOnlyNewAxioms(true); l.init(); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -97,7 +97,7 @@ while(newModel.size() != 0){ model.add(newModel); // get fraction of instances s with <s p s> - query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = "SELECT (COUNT(DISTINCT ?s) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -106,7 +106,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?s) AS ?reflexive) WHERE {?s <%s> ?s.}"; + query = "SELECT (COUNT(DISTINCT ?s) AS ?reflexive) WHERE {?s <%s> ?s.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int reflexive = 0; @@ -130,7 +130,7 @@ private void runSPARQL1_1_Mode() { // get fraction of instances s with <s p s> - String query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + String query = "SELECT (COUNT(DISTINCT ?s) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -139,7 +139,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?s) AS ?reflexive) WHERE {?s <%s> ?s.}"; + query = "SELECT (COUNT(DISTINCT ?s) AS ?reflexive) WHERE {?s <%s> ?s.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int reflexive = 0; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -96,7 +96,7 @@ while(newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> - query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = 
executeSelectQuery(query); QuerySolution qs; @@ -105,7 +105,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?s) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s}"; + query = "SELECT (COUNT(*) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int symmetric = 0; @@ -127,7 +127,7 @@ } private void runSPARQL1_1_Mode(){ - String query = "SELECT (COUNT(?s) AS ?total) WHERE {?s <%s> ?o.}"; + String query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -136,7 +136,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?s) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s}"; + query = "SELECT (COUNT(*) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int symmetric = 0; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -98,7 +98,7 @@ while(newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> - query = "SELECT (COUNT(?o) AS ?total) WHERE {?s <%s> ?o. ?o <%s> ?o1.}"; + query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o. ?o <%s> ?o1.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -107,7 +107,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?o) AS ?transitive) WHERE {?s <%s> ?o. ?o <%s> ?o1. ?s <%s> ?o1.}"; + query = "SELECT (COUNT(*) AS ?transitive) WHERE {?s <%s> ?o. ?o <%s> ?o1. ?s <%s> ?o1.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int transitive = 0; @@ -128,7 +128,7 @@ } private void runSPARQL1_1_Mode(){ - String query = "SELECT (COUNT(?o) AS ?total) WHERE {?s <%s> ?o. ?o <%s> ?o1.}"; + String query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o. ?o <%s> ?o1.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); ResultSet rs = executeSelectQuery(query); QuerySolution qs; @@ -137,7 +137,7 @@ qs = rs.next(); total = qs.getLiteral("total").getInt(); } - query = "SELECT (COUNT(?o) AS ?transitive) WHERE {?s <%s> ?o. ?o <%s> ?o1. ?s <%s> ?o1.}"; + query = "SELECT (COUNT(*) AS ?transitive) WHERE {?s <%s> ?o. ?o <%s> ?o1. 
?s <%s> ?o1.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); int transitive = 0; Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-04-29 18:39:37 UTC (rev 3668) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-04-30 09:04:11 UTC (rev 3669) @@ -19,6 +19,7 @@ package org.dllearner.core; +import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -53,7 +54,10 @@ import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; +import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; +import com.hp.hpl.jena.sparql.resultset.ResultSetMem; import com.hp.hpl.jena.util.iterator.Filter; import com.hp.hpl.jena.vocabulary.OWL2; import com.hp.hpl.jena.vocabulary.RDF; @@ -221,10 +225,19 @@ SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); - queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); + queryExecution.setTimeout(getRemainingRuntimeInMilliSeconds()); queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - return queryExecution.execConstruct(); + try { + return queryExecution.execConstruct(); + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout", e); + } else { + logger.error("Exception executing query", e); + } + return ModelFactory.createDefaultModel(); + } } else { QueryExecution qexec = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); return qexec.execConstruct(); @@ -237,10 +250,19 @@ SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); - queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); + queryExecution.setTimeout(getRemainingRuntimeInMilliSeconds()); queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - return queryExecution.execSelect(); + try { + return queryExecution.execSelect(); + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout", e); + } else { + logger.error("Exception executing query", e); + } + return new ResultSetMem(); + } } else { return executeSelectQuery(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); } @@ -280,8 +302,12 @@ return entries; } + private long getRemainingRuntimeInMilliSeconds(){ + return Math.max(0, (maxExecutionTimeInSeconds * 1000) - (System.currentTimeMillis() - startTime)); + } + protected boolean terminationCriteriaSatisfied(){ - boolean timeLimitExceeded = maxExecutionTimeInSeconds == 0 ? false : (System.currentTimeMillis() - startTime) >= maxExecutionTimeInSeconds * 1000; + boolean timeLimitExceeded = maxExecutionTimeInSeconds == 0 ? false : getRemainingRuntimeInMilliSeconds() <= 0; boolean resultLimitExceeded = maxFetchedRows == 0 ? 
false : fetchedRows >= maxFetchedRows; return timeLimitExceeded || resultLimitExceeded; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
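Editorial note: the central change in AbstractAxiomLearningAlgorithm is that every HTTP query is now bounded by whatever is left of the algorithm's overall time budget, and a timed-out query degrades to an empty result instead of aborting the run. A condensed sketch of that pattern (endpoint, maxExecutionTimeInSeconds and startTime are the existing fields of the class; the local-model code path is omitted):

    private long getRemainingRuntimeInMilliSeconds() {
        // e.g. with maxExecutionTimeInSeconds = 10 and 7 s already spent, this returns roughly 3000
        return Math.max(0, (maxExecutionTimeInSeconds * 1000) - (System.currentTimeMillis() - startTime));
    }

    protected ResultSet executeSelectQuery(String query) {
        QueryEngineHTTP qe = new QueryEngineHTTP(endpoint.getURL().toString(), query);
        qe.setTimeout(getRemainingRuntimeInMilliSeconds());   // remaining budget, not a fixed per-query timeout
        try {
            return qe.execSelect();
        } catch (QueryExceptionHTTP e) {
            // a timeout is only logged; the caller receives an empty in-memory result set
            return new ResultSetMem();
        }
    }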
From: <seb...@us...> - 2012-04-29 18:39:43
Revision: 3668 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3668&view=rev Author: sebastianwtr Date: 2012-04-29 18:39:37 +0000 (Sun, 29 Apr 2012) Log Message: ----------- [tbsl exploration] added special case Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java 2012-04-29 17:25:45 UTC (rev 3667) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java 2012-04-29 18:39:37 UTC (rev 3668) @@ -554,9 +554,98 @@ /* + * for the case: [?y rdf:type Klasse][?y Proptery Resource] + * or: [?y rdf:type Klasse][Resource Proptery ?y] + * + */ + if((condition1_exists_isa||condition2_exists_isa)&&gotResource&&(condition1_exists_resource||condition2_exists_resource)){ + String class_variable=null; + String class_property_variable=null; + ArrayList<String> working_condition=new ArrayList<String>(); + /* + * selcet "working_condition" + */ + if(condition1_exists_isa){ + class_variable= condition1.get(2); + class_property_variable=condition1.get(0); + working_condition= condition2; + } + if(condition2_exists_isa){ + class_variable= condition2.get(2); + class_property_variable=condition2.get(0); + working_condition= condition1; + } + + Hypothesis class_h=null; + + for(Hypothesis h_t : givenHypothesenList){ + if(h_t.getVariable().contains(class_variable)){ + class_h=h_t; + break; + } + } + + System.out.println("class_variable: " + class_variable); + System.out.println("Class Hypothese: "); + + /* + * check now, which side the classVariable is in the other condition + * + */ + + String property_variable_local=null; + String resource_variable_local=null; + String side_of_property=null; + + if(working_condition.get(0).contains(class_property_variable)){ + property_variable_local=working_condition.get(1); + resource_variable_local=working_condition.get(2); + side_of_property="RIGHT"; + } + else{ + property_variable_local=working_condition.get(1); + resource_variable_local=working_condition.get(2); + side_of_property="LEFT"; + } + + String property_name=null; + for(Hypothesis h_t : givenHypothesenList){ + if(h_t.getVariable().contains(property_variable_local)){ + property_name=h_t.getName(); + + } + } + for(ElementList el : resources){ + //System.out.println("el.getVariablename(): "+el.getVariablename()); + if(el.getVariablename().contains(resource_h.getName())&&el.getVariablename().contains(side_of_property)){ + ArrayList<Hypothesis> resultHypothesenList=new ArrayList<Hypothesis>(); + + if(type.contains("LEVENSTHEIN"))resultHypothesenList= LevenstheinModule.doLevensthein(property_variable_local,property_name,el.getHm()); + if(type.contains("RELATE"))resultHypothesenList= SemanticRelatenes.doSemanticRelatenes(property_variable_local,property_name,el.getHm()); + + if(type.contains("WORDNET"))resultHypothesenList= WordnetModule.doWordnet(property_variable_local,property_name,el.getHm(),myindex,wordnet,lemmatiser); + System.out.println("After generating new Hypothesen.\n "+resultHypothesenList.size()+" new were generated"); + for(Hypothesis h_temp : resultHypothesenList) { + ArrayList<Hypothesis> temp_al = new ArrayList<Hypothesis>(); + temp_al.add(class_h); + temp_al.add(h_temp); + 
temp_al.add(resource_h); + System.out.println("Hypothesen:"); + class_h.printAll(); + h_temp.printAll(); + finalHypothesenList.add(temp_al); + } + } + } + + + } + + + /* * ISA */ - if((condition1_exists_isa||condition2_exists_isa)&&gotResource){ + else if((condition1_exists_isa||condition2_exists_isa)&&gotResource){ /* * get Hypothese for the Class @@ -639,7 +728,7 @@ * Resource */ - if((condition1_exists_resource||condition2_exists_resource)&&gotResource){ + else if((condition1_exists_resource||condition2_exists_resource)&&gotResource){ System.out.println("IN RESOURCE NOT SIMPLE CASE!!!"); System.out.println("resource_variable: " + resource_variable); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
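Editorial note: the new branch covers templates in which one condition types a variable and the other condition links that same variable to an already resolved resource; the class hypothesis is kept as it is and only the property slot is re-resolved via the Levensthein/WordNet modules. Purely for illustration (the URIs below are made-up placeholders, not values from the code), the SPARQL shape such a template ends up with after isA has been rewritten to rdf:type looks like this:

    // [?y isA Klasse] [?y Property Resource]
    String query =
        "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
      + "SELECT ?y WHERE { "
      + "  ?y rdf:type <http://dbpedia.org/ontology/SomeClass> . "
      + "  ?y <http://dbpedia.org/ontology/someProperty> <http://dbpedia.org/resource/Some_Resource> . "
      + "}";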
From: <sha...@us...> - 2012-04-29 17:25:52
Revision: 3667 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3667&view=rev Author: shadowtm Date: 2012-04-29 17:25:45 +0000 (Sun, 29 Apr 2012) Log Message: ----------- Added logger for the cli package so we could enable output of stacktraces to the appenders if we want. Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java trunk/interfaces/src/main/resources/log4j.properties Modified: trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2012-04-29 16:25:14 UTC (rev 3666) +++ trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2012-04-29 17:25:45 UTC (rev 3667) @@ -28,7 +28,6 @@ import java.util.Map.Entry; import org.apache.commons.lang.exception.ExceptionUtils; -import org.apache.log4j.Logger; import org.apache.xmlbeans.XmlObject; import org.dllearner.configuration.IConfiguration; import org.dllearner.configuration.spring.ApplicationContextBuilder; @@ -39,6 +38,8 @@ import org.dllearner.core.*; import org.dllearner.learningproblems.PosNegLP; import org.dllearner.utilities.Files; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.Resource; @@ -52,9 +53,8 @@ */ public class CLI { - private static Logger logger = Logger.getLogger(CLI.class); - private static Logger rootLogger = Logger.getRootLogger(); - + private static Logger logger = LoggerFactory.getLogger(CLI.class); + private ApplicationContext context; private IConfiguration configuration; private File confFile; @@ -190,7 +190,7 @@ // Get the Root Error Message logger.error("An Error Has Occurred During Processing."); logger.error(primaryCause.getMessage()); - + logger.debug("Stack Trace: ", e); logger.error("Terminating DL-Learner...and writing stacktrace to: " + stacktraceFileName); FileOutputStream fos = new FileOutputStream(stacktraceFileName); PrintStream ps = new PrintStream(fos); Modified: trunk/interfaces/src/main/resources/log4j.properties =================================================================== --- trunk/interfaces/src/main/resources/log4j.properties 2012-04-29 16:25:14 UTC (rev 3666) +++ trunk/interfaces/src/main/resources/log4j.properties 2012-04-29 17:25:45 UTC (rev 3667) @@ -1,3 +1,5 @@ +# All loggers will default to the INFO level unless specified in a specific logger +# For more information, see the Log4J API manual at: http://bit.ly/KmvtWL . log4j.rootLogger=INFO, stdout, file log4j.appender.stdout=org.apache.log4j.ConsoleAppender @@ -12,6 +14,8 @@ # DL-Learner Logs log4j.logger.org.dllearner=INFO +# Turn this to Debug if you wish to dump stack traces to the appenders (console, file) +log4j.logger.org.dllearner.cli=INFO log4j.logger.org.dllearner.server.nke.LogicalRelationStrategy=DEBUG log4j.category.org.dllearner.kb.simple=DEBUG This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
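Editorial note: the net effect of this revision is a logging convention for the cli package: the concise message goes to ERROR, the full stack trace only to DEBUG, so traces reach the appenders once org.dllearner.cli is raised to DEBUG in log4j.properties. A minimal sketch of that pattern (the file handling of the real catch block is omitted):

    private static final Logger logger = LoggerFactory.getLogger(CLI.class);

    try {
        cli.run();
    } catch (Exception e) {
        logger.error("An Error Has Occurred During Processing.");
        logger.error(e.getMessage());
        logger.debug("Stack Trace: ", e);   // visible only with log4j.logger.org.dllearner.cli=DEBUG
    }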
From: <sha...@us...> - 2012-04-29 16:25:20
Revision: 3666 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3666&view=rev Author: shadowtm Date: 2012-04-29 16:25:14 +0000 (Sun, 29 Apr 2012) Log Message: ----------- Improved the way we were handling/outputting error messages to the console for the CLI interface. Modified Paths: -------------- trunk/components-core/pom.xml trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java trunk/interfaces/src/main/java/org/dllearner/configuration/spring/ConfigurationBasedBeanDefinitionRegistryPostProcessor.java trunk/interfaces/src/main/java/org/dllearner/configuration/spring/DefaultApplicationContextBuilder.java trunk/pom.xml Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2012-04-29 15:58:36 UTC (rev 3665) +++ trunk/components-core/pom.xml 2012-04-29 16:25:14 UTC (rev 3666) @@ -225,6 +225,10 @@ <groupId>org.apache.lucene</groupId> <artifactId>lucene-core</artifactId> </dependency> + <dependency> + <groupId>commons-lang</groupId> + <artifactId>commons-lang</artifactId> + </dependency> </dependencies> Modified: trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2012-04-29 15:58:36 UTC (rev 3665) +++ trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2012-04-29 16:25:14 UTC (rev 3666) @@ -27,24 +27,16 @@ import java.util.List; import java.util.Map.Entry; -import org.apache.log4j.ConsoleAppender; -import org.apache.log4j.Layout; -import org.apache.log4j.Level; +import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.log4j.Logger; -import org.apache.log4j.PatternLayout; import org.apache.xmlbeans.XmlObject; -import org.dllearner.Info; import org.dllearner.configuration.IConfiguration; import org.dllearner.configuration.spring.ApplicationContextBuilder; import org.dllearner.configuration.spring.DefaultApplicationContextBuilder; import org.dllearner.configuration.util.SpringConfigurationXMLBeanConverter; import org.dllearner.confparser3.ConfParserConfiguration; import org.dllearner.confparser3.ParseException; -import org.dllearner.core.AbstractCELA; -import org.dllearner.core.AbstractReasonerComponent; -import org.dllearner.core.KnowledgeSource; -import org.dllearner.core.LearningAlgorithm; -import org.dllearner.core.ReasoningMethodUnsupportedException; +import org.dllearner.core.*; import org.dllearner.learningproblems.PosNegLP; import org.dllearner.utilities.Files; import org.springframework.context.ApplicationContext; @@ -170,10 +162,10 @@ List<Resource> springConfigResources = new ArrayList<Resource>(); - //DL-Learner Configuration Object - IConfiguration configuration = new ConfParserConfiguration(confFile); + try { + //DL-Learner Configuration Object + IConfiguration configuration = new ConfParserConfiguration(confFile); - try { ApplicationContextBuilder builder = new DefaultApplicationContextBuilder(); ApplicationContext context = builder.buildApplicationContext(configuration,springConfigResources); @@ -191,7 +183,15 @@ cli.run(); } catch (Exception e) { String stacktraceFileName = "log/error.log"; - logger.error("An Error Occurred During Processing. Terminating DL-Learner...and writing stacktrace to: " + stacktraceFileName); + + //Find the primary cause of the exception. 
+ Throwable primaryCause = findPrimaryCause(e); + + // Get the Root Error Message + logger.error("An Error Has Occurred During Processing."); + logger.error(primaryCause.getMessage()); + + logger.error("Terminating DL-Learner...and writing stacktrace to: " + stacktraceFileName); FileOutputStream fos = new FileOutputStream(stacktraceFileName); PrintStream ps = new PrintStream(fos); e.printStackTrace(ps); @@ -199,7 +199,30 @@ } - public void setContext(ApplicationContext context) { + /** + * Find the primary cause of the specified exception. + * + * @param e The exception to analyze + * @return The primary cause of the exception. + */ + private static Throwable findPrimaryCause(Exception e) { + // The throwables from the stack of the exception + Throwable[] throwables = ExceptionUtils.getThrowables(e); + + //Look For a Component Init Exception and use that as the primary cause of failure, if we find it + int componentInitExceptionIndex = ExceptionUtils.indexOfThrowable(e, ComponentInitException.class); + + Throwable primaryCause; + if(componentInitExceptionIndex > -1) { + primaryCause = throwables[componentInitExceptionIndex]; + }else { + //No Component Init Exception on the Stack Trace, so we'll use the root as the primary cause. + primaryCause = ExceptionUtils.getRootCause(e); + } + return primaryCause; + } + + public void setContext(ApplicationContext context) { this.context = context; } Modified: trunk/interfaces/src/main/java/org/dllearner/configuration/spring/ConfigurationBasedBeanDefinitionRegistryPostProcessor.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/configuration/spring/ConfigurationBasedBeanDefinitionRegistryPostProcessor.java 2012-04-29 15:58:36 UTC (rev 3665) +++ trunk/interfaces/src/main/java/org/dllearner/configuration/spring/ConfigurationBasedBeanDefinitionRegistryPostProcessor.java 2012-04-29 16:25:14 UTC (rev 3666) @@ -14,7 +14,6 @@ import org.springframework.beans.factory.support.ManagedSet; import java.util.Collection; -import java.util.StringTokenizer; /** * Created by IntelliJ IDEA. Modified: trunk/interfaces/src/main/java/org/dllearner/configuration/spring/DefaultApplicationContextBuilder.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/configuration/spring/DefaultApplicationContextBuilder.java 2012-04-29 15:58:36 UTC (rev 3665) +++ trunk/interfaces/src/main/java/org/dllearner/configuration/spring/DefaultApplicationContextBuilder.java 2012-04-29 16:25:14 UTC (rev 3666) @@ -59,7 +59,6 @@ try { context.refresh(); } catch (BeanCreationException e) { - logger.error("There was a problem creating the bean named \"" + e.getBeanName() + "\" - Check your configuration file and try again."); throw new RuntimeException(e); } catch (Exception e) { logger.error("There was a problem initializing the components...shutting down."); Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2012-04-29 15:58:36 UTC (rev 3665) +++ trunk/pom.xml 2012-04-29 16:25:14 UTC (rev 3666) @@ -478,7 +478,13 @@ <version>2.6.0</version> </dependency> - </dependencies> + <dependency> + <groupId>commons-lang</groupId> + <artifactId>commons-lang</artifactId> + <version>2.6</version> + </dependency> + + </dependencies> </dependencyManagement> <repositories> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
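Editorial note: the cause analysis buried in the diff boils down to walking the cause chain with commons-lang's ExceptionUtils, preferring a ComponentInitException if one is present and otherwise reporting the root cause. Condensed:

    Throwable[] chain = ExceptionUtils.getThrowables(e);
    int idx = ExceptionUtils.indexOfThrowable(e, ComponentInitException.class);
    // a ComponentInitException usually carries the most useful message for conf file problems
    Throwable primaryCause = (idx > -1) ? chain[idx] : ExceptionUtils.getRootCause(e);
    logger.error(primaryCause.getMessage());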
From: <seb...@us...> - 2012-04-29 15:58:44
Revision: 3665 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3665&view=rev Author: sebastianwtr Date: 2012-04-29 15:58:36 +0000 (Sun, 29 Apr 2012) Log Message: ----------- [tbsl-exploration] updated some functions and repaired the function for getting informations from the server Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/Query.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/ServerUtil.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/MainInterface.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/Setting.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/LevenstheinModule.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/WordnetModule.java Added Paths: ----------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/HeuristicSort.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/SemanticRelatenes.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -104,26 +104,51 @@ String result=null; String result2 = null; ArrayList<String> result_List = new ArrayList<String>(); - result=myindex.getPropertyURI(string.toLowerCase()); - result2=myindex.getontologyURI(string.toLowerCase()); - if(Setting.isDebugModus())DebugMode.debugPrint("Result: "+result); - if(result2!=null){ - result_List.add(result2); - hm.put(result, 1.0f); - if(Setting.isDebugModus())DebugMode.debugPrint("Found uri for: "+string.toLowerCase()); + + if(string.substring(string.length()-1).contains("s")){ + String neuer_string = string.substring(0, string.length() -1); + result=myindex.getPropertyURI(neuer_string.toLowerCase()); + result2=myindex.getontologyURI(neuer_string.toLowerCase()); + //tmp2=myindex.getYagoURI(neuer_string.toLowerCase()); + if(result2!=null){ + result_List.add(result2); + hm.put(result, 1.0f); + } + else if(result!=null){ + result_List.add(result); + hm.put(result, 1.0f); + if(Setting.isDebugModus())DebugMode.debugPrint("Found uri for: "+string.toLowerCase()); + } + else{ + if(Setting.isDebugModus())DebugMode.debugErrorPrint("Didnt find uri for: "+string.toLowerCase()); + + result_List.add("http://dbpedia.org/ontology/"+string.toLowerCase().replace(" ", "_")); + hm.put(result, 0.0f); + } } - else if(result!=null){ - result_List.add(result); - 
hm.put(result, 1.0f); - if(Setting.isDebugModus())DebugMode.debugPrint("Found uri for: "+string.toLowerCase()); - } else{ - if(Setting.isDebugModus())DebugMode.debugErrorPrint("Didnt find uri for: "+string.toLowerCase()); - - result_List.add("http://dbpedia.org/ontology/"+string.toLowerCase().replace(" ", "_")); - hm.put(result, 0.0f); + result=myindex.getPropertyURI(string.toLowerCase()); + result2=myindex.getontologyURI(string.toLowerCase()); + if(Setting.isDebugModus())DebugMode.debugPrint("Result: "+result); + if(result2!=null){ + result_List.add(result2); + hm.put(result, 1.0f); + if(Setting.isDebugModus())DebugMode.debugPrint("Found uri for: "+string.toLowerCase()); + } + else if(result!=null){ + result_List.add(result); + hm.put(result, 1.0f); + if(Setting.isDebugModus())DebugMode.debugPrint("Found uri for: "+string.toLowerCase()); + } + else{ + if(Setting.isDebugModus())DebugMode.debugErrorPrint("Didnt find uri for: "+string.toLowerCase()); + + result_List.add("http://dbpedia.org/ontology/"+string.toLowerCase().replace(" ", "_")); + hm.put(result, 0.0f); + } } + if(Setting.isDebugModus())DebugMode.debugPrint("######\n"); @@ -146,7 +171,7 @@ ArrayList<String> result_List = new ArrayList<String>(); tmp1=myindex.getontologyClassURI(string.toLowerCase()); - tmp2=myindex.getYagoURI(string.toLowerCase()); + //tmp2=myindex.getYagoURI(string.toLowerCase()); if(tmp1!=null){ result_List.add(tmp1); } @@ -178,9 +203,9 @@ //} - if(tmp2!=null) { + /*if(tmp2!=null) { result_List.add(tmp2); - } + }*/ /* * if nothing is found, also try the like operator for each part of the string */ @@ -202,13 +227,13 @@ if(string.substring(string.length()-1).contains("s")){ String neuer_string = string.substring(0, string.length() -1); tmp1=myindex.getontologyClassURI(neuer_string.toLowerCase()); - tmp2=myindex.getYagoURI(neuer_string.toLowerCase()); + //tmp2=myindex.getYagoURI(neuer_string.toLowerCase()); if(tmp1!=null){ result_List.add(tmp1); } - if(tmp2!=null){ + /*if(tmp2!=null){ result_List.add(tmp1); - } + }*/ } if(string.length()>3){ @@ -216,13 +241,13 @@ String neuer_string = string.substring(0, string.length() -3); neuer_string+="y"; tmp1=myindex.getontologyClassURI(neuer_string.toLowerCase()); - tmp2=myindex.getYagoURI(neuer_string.toLowerCase()); + //tmp2=myindex.getYagoURI(neuer_string.toLowerCase()); if(tmp1!=null){ result_List.add(tmp1); } - if(tmp2!=null){ + /*if(tmp2!=null){ result_List.add(tmp1); - } + }*/ } } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -32,12 +32,12 @@ createWordnetHelp(); createIndexOntology(); createIndexOntologyClass(); - createIndexofYago(); + //createIndexofYago(); - System.out.println("start IndexNumber"); + /*System.out.println("start IndexNumber"); createNewSpecialIndexNumber(); System.out.println("start SpecialIndex"); - createNewSpecialIndex(); + createNewSpecialIndex();*/ lemma = new StanfordLemmatizer(); @@ -148,7 +148,7 @@ rs = stat.executeQuery("select uri from property where name='"+string.toLowerCase()+"';"); while(rs.next()){ String result_string= rs.getString("uri"); - System.out.println("Property: "+result_string); + //System.out.println("Property: 
"+result_string); //check for double: boolean found = false; for(String s: al){ @@ -160,7 +160,7 @@ rs = stat.executeQuery("select uri from ontology where name='"+string.toLowerCase()+"';"); while(rs.next()){ String result_string= rs.getString("uri"); - System.out.println("OntologyProperty: "+result_string); + //System.out.println("OntologyProperty: "+result_string); //check for double: boolean found = false; for(String s: al){ @@ -168,7 +168,7 @@ } if(found==false)al.add(result_string); } - System.out.println("Anzahl ArrayList: "+al.size()); + //System.out.println("Anzahl ArrayList: "+al.size()); if(al.size()==1) return al.get(0); //check if there is one with an ontology in it else{ Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/Template.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -9,6 +9,7 @@ private ArrayList<ArrayList<Hypothesis>> hypothesen = new ArrayList<ArrayList<Hypothesis>>(); private ArrayList<ArrayList<Hypothesis>> hypothesenLevensthein = new ArrayList<ArrayList<Hypothesis>>(); private ArrayList<ArrayList<Hypothesis>> hypothesenWordnet = new ArrayList<ArrayList<Hypothesis>>(); + private ArrayList<ArrayList<Hypothesis>> hypothesenRelate = new ArrayList<ArrayList<Hypothesis>>(); private String selectTerm; private String having; private String filter; @@ -187,6 +188,12 @@ public void setHypothesenWordnet(ArrayList<ArrayList<Hypothesis>> hypothesenWordnet) { this.hypothesenWordnet = hypothesenWordnet; } + public ArrayList<ArrayList<Hypothesis>> getHypothesenRelate() { + return hypothesenRelate; + } + public void setHypothesenRelate(ArrayList<ArrayList<Hypothesis>> hypothesenRelate) { + this.hypothesenRelate = hypothesenRelate; + } } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -54,7 +54,7 @@ * check if templates were build, if not, safe the question and delete it for next time from the xml file. 
* Only in Debug Mode */ - //if(Setting.isDebugModus()){ + if(Setting.isDebugModus()){ if(querytemps.contains("could not be parsed") || querytemps.isEmpty()){ String dateiname="/home/swalter/Dokumente/Auswertung/NotParsed.txt"; String result_string =""; @@ -79,7 +79,7 @@ } - //} + } long stop_template = System.currentTimeMillis(); @@ -310,7 +310,7 @@ } if(!result.isEmpty()){ h.setUri(result.get(0)); - h.setRank(0.0); + h.setRank(1.0); } } catch (SQLException e) { // TODO Auto-generated catch block @@ -334,7 +334,10 @@ try { ArrayList<String> tmp = Index_utils.searchIndexForClass(h.getUri(), myindex); System.out.println("Laenge tmp: "+tmp.size()); - if(tmp.size()>0)h.setUri(tmp.get(0)); + if(tmp.size()>0){ + h.setUri(tmp.get(0)); + h.setRank(1.0); + } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); @@ -345,7 +348,10 @@ try { ArrayList<String> tmp = Index_utils.searchIndexForResource(h.getUri(), myindex); System.out.println("Laenge tmp: "+tmp.size()); - if(tmp.size()>0)h.setUri(tmp.get(0)); + if(tmp.size()>0){ + h.setUri(tmp.get(0)); + h.setRank(1.0); + } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); @@ -369,10 +375,20 @@ if(add_reverse_template){ for (ArrayList<String> x : condition_template_reverse_conditions){ ArrayList<String> new_list = new ArrayList<String>(); - new_list.add(x.get(2)); - new_list.add(x.get(1)); - new_list.add(x.get(0)); - condition_reverse_new.add(new_list); + if(x.get(1).contains("ISA")){ + new_list.add(x.get(0)); + new_list.add(x.get(1)); + new_list.add(x.get(2)); + condition_reverse_new.add(new_list); + if(condition_template_reverse_conditions.size()>=2) add_reverse_template=true; + } + else{ + new_list.add(x.get(2)); + new_list.add(x.get(1)); + new_list.add(x.get(0)); + condition_reverse_new.add(new_list); + } + } } @@ -404,7 +420,10 @@ template.setElm(elm); resultArrayList.add(template); } - if(add_reverse_template){ + /* + * Also change the condition, if you have two Conditions in which is one an isa + */ + //if(add_reverse_template ||template_reverse_conditions.getCondition().size()>1 ){ start_elements = System.currentTimeMillis(); Elements elm_reverse = new Elements(template_reverse_conditions.getCondition(),template_reverse_conditions.getHypothesen()); stop_elements = System.currentTimeMillis(); @@ -417,7 +436,7 @@ template_reverse_conditions.setElm(elm_reverse); resultArrayList.add(template_reverse_conditions); } - } + //} } } Added: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/HeuristicSort.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/HeuristicSort.java (rev 0) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/HeuristicSort.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -0,0 +1,47 @@ +package org.dllearner.algorithm.tbsl.exploration.Utils; + +import java.util.ArrayList; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.dllearner.algorithm.tbsl.exploration.exploration_main.Setting; + +public class HeuristicSort { + + /* + * TODO: test if the sorted queries are given back properly + */ + public static ArrayList<QueryPair> doSort(ArrayList<QueryPair> qp, String question){ + + boolean change=true; + //while(change){ + //change=false; + for(int i = 0; i<qp.size()-1;i++){ + if(qp.get(i).getRank()==qp.get(i+1).getRank()&&question.contains("of")){ + //change=true; + QueryPair one = 
qp.get(i); + QueryPair two = qp.get(i+1); + String string = one.getQuery(); + //Pattern p = Pattern.compile (".*\\<http://dbpedia.org/resource/.*\\> \\<http://dbpedia.org/.*\\> \\?.*"); + //Matcher m = p.matcher (string); + if(string.matches(".*\\<http://dbpedia.org/resource/.*\\> \\<http://dbpedia.org/.*\\> \\?.*")){ + qp.set(i, one); + qp.set(i+1, two); + + } + else{ + qp.set(i, two); + qp.set(i+1, one); + } + + } + } + //} + + if(Setting.isDebugModus())DebugMode.printQueryPair(qp); + + return qp; + + } + +} Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/Query.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/Query.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/Query.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -34,7 +34,9 @@ else if(type.contains("WORDNET")){ givenHypothesenList=t.getHypothesenWordnet(); } - + else if(type.contains("RELATE")){ + givenHypothesenList=t.getHypothesenRelate(); + } else { if(!type.contains("NORMAL"))System.err.println("ATTENTION\n Given Type: "+type+" was not found in generating Queries!!\n"); givenHypothesenList=t.getHypothesen(); @@ -58,10 +60,34 @@ condition_new=condition_new.replace("isA", "rdf:type"); global_rank=global_rank+h.getRank(); } + + /* + * normalise Rank! + */ + + global_rank = global_rank/hypothesenList.size(); + //System.out.println("New_Condition after replacing "+condition_new); - String query="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+t.getQueryType()+" "+t.getSelectTerm()+" WHERE {"+ condition_new+" "+ t.getFilter()+"}"+t.getOrderBy()+" "+t.getHaving() +" "+t.getLimit(); - QueryPair qp = new QueryPair(query,global_rank); - if(addQuery)queryList.add(qp); + if(t.getQuestion().toLowerCase().contains("who")){ + /* + * PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> +SELECT DISTINCT ?uri ?string +WHERE { + res:Brooklyn_Bridge dbp:designer ?uri . + OPTIONAL { ?uri rdfs:label ?string. FILTER (lang(?string) = 'en') } + */ + + String query="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "+t.getQueryType()+" "+t.getSelectTerm()+"?string WHERE {"+ condition_new+" OPTIONAL { "+ t.getSelectTerm()+" rdfs:label ?string. 
FILTER (lang(?string) = 'en') }"+ t.getFilter()+"}"+t.getOrderBy()+" "+t.getHaving() +" "+t.getLimit(); + QueryPair qp = new QueryPair(query,global_rank); + if(addQuery)queryList.add(qp); + + } + else{ + String query="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+t.getQueryType()+" "+t.getSelectTerm()+" WHERE {"+ condition_new+" "+ t.getFilter()+"}"+t.getOrderBy()+" "+t.getHaving() +" "+t.getLimit(); + QueryPair qp = new QueryPair(query,global_rank); + if(addQuery)queryList.add(qp); + } + } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/ServerUtil.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/ServerUtil.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/ServerUtil.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -290,17 +290,63 @@ } private static ArrayList<String> createAnswerArray(String string){ + /* + * <html><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head><body><table class="sparql" border="1"> + <tbody><tr> + <th>x</th> + <th>string</th> + </tr> + <tr> + <td>http://dbpedia.org/resource/John_A._Roebling</td> + <td>"John A. Roebling"@en</td> + </tr> +</tbody></table></body></html> + */ Pattern p = Pattern.compile (".*\\<td\\>(.*)\\</td\\>.*"); string = string.replace("<table class=\"sparql\" border=\"1\">", "").replace("<tr>","").replace("</tr>", "").replace("</table>", ""); - Matcher m = p.matcher (string); + + //System.out.println("Nach erster Bearbeitung: "+string); + + Matcher m = p.matcher (string); String[] bla = string.split(" "); ArrayList<String> result= new ArrayList<String>(); for(String s: bla){ - m=p.matcher(s); + //System.out.println("s von bla: "+s); + s=s.replace("<td>\"", ""); + s=s.replace("\"@en</td>", ""); + s=s.replace("<td>", ""); + s=s.replace("</td>", ""); + + //System.out.println("s new von bla: "+s); + + s = s.replace("\"@en",""); + s = s.replace("\"",""); + s = s.replace("\n",""); + s = s.replace("^^<http://www.w3.org/2001/XMLSchema#date>",""); + s = s.replace("^^<http://www.w3.org/2001/XMLSchema#int>",""); + s = s.replace("^^<http://www.w3.org/2001/XMLSchema#number>",""); + s = s.replace("\"",""); + s=s.replace(" ", ""); + for(int i =0; i<s.length();i++){ + s=s.replace(" ",""); + } + + if(s.length()>1){ + if(s.substring(0,1).contains(" ")){ + s = s.substring(1, s.length()); + } + } + + if(!s.contains("<th>")&&!s.matches(" ")&&s.length()>0){ + //System.out.println("add :"+s+"DONE"); + result.add(s); + } + /*m=p.matcher(s); while (m.find()) { String temp = m.group(1); + System.out.println("temp: "+temp); temp = temp.replace("\"@en",""); temp = temp.replace("\"",""); temp = temp.replace("^^<http://www.w3.org/2001/XMLSchema#date>",""); @@ -310,7 +356,7 @@ //result.add(m.group(1)); result.add(temp); - } + }*/ } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/MainInterface.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/MainInterface.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/MainInterface.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -16,6 +16,7 @@ import org.dllearner.algorithm.tbsl.exploration.Sparql.Template; import 
org.dllearner.algorithm.tbsl.exploration.Sparql.TemplateBuilder; import org.dllearner.algorithm.tbsl.exploration.Utils.DebugMode; +import org.dllearner.algorithm.tbsl.exploration.Utils.HeuristicSort; import org.dllearner.algorithm.tbsl.exploration.Utils.LinearSort; import org.dllearner.algorithm.tbsl.exploration.Utils.Query; import org.dllearner.algorithm.tbsl.exploration.Utils.QueryPair; @@ -27,7 +28,7 @@ import org.dllearner.algorithms.gp.GP; public class MainInterface { - private static int anzahlAbgeschickterQueries = 10; + //private static int anzahlAbgeschickterQueries = 10; public static ArrayList<String> startQuestioning(String question,BasicTemplator btemplator,SQLiteIndex myindex, WordNet wordnet,StanfordLemmatizer lemmatiser) throws ClassNotFoundException, SQLException, IOException{ @@ -37,6 +38,7 @@ * false, goes through all */ boolean wait = false; + Setting.setThresholdSelect(0.5); if(Setting.isWaitModus())wait=true; @@ -80,13 +82,15 @@ //sort QueryPairs qp=LinearSort.doSort(qp); + qp=HeuristicSort.doSort(qp, Question); + //if(Setting.isDebugModus())printQueries(qp, "NORMAL", Question); printQueries(qp, "NORMAL", Question); + Setting.setAnzahlAbgeschickterQueries(10); - int anzahl=1; boolean go_on = true; for(QueryPair q : qp){ - if(anzahl<anzahlAbgeschickterQueries&go_on){ + if(anzahl<10&go_on &!q.getQuery().contains("ASK")){ ArrayList<String> answer_tmp = new ArrayList<String>(); System.out.println("Sending Query to Server: "+q.getQuery()); answer_tmp=ServerUtil.requestAnswerFromServer(q.getQuery()); @@ -94,7 +98,7 @@ else{ go_on=false; - if(qp.size()<3)go_on=true; + //if(qp.size()<3)go_on=true; System.out.println("Got Answer from Server with this Query: "+ q.getQuery()); //go_on=true; boolean contains_uri=false; @@ -106,18 +110,21 @@ } for(String s : answer_tmp){ if(checkAnswer(s)){ - boolean double_result = false; - for(String s_t : answers){ - if(s_t.contains(s)) double_result=true; - } - //TODO Test!!!!!! - //if in one answer is an http, only add uri's - if(!double_result){ - if(contains_uri){ - if(s.contains("http"))answers.add(s); + if(!s.equals("0")){ + boolean double_result = false; + for(String s_t : answers){ + if(s_t.contains(s)) double_result=true; } - else answers.add(s); + //TODO Test!!!!!! 
+ //if in one answer is an http, only add uri's + if(!double_result){ + if(contains_uri){ + if(s.contains("http"))answers.add(s); + } + else answers.add(s); + } } + } } //if(checkAnswer(answer_tmp))answers.addAll(answer_tmp); @@ -136,9 +143,11 @@ /* * If there is no answer, start IterationMode with Levensthein */ - if(answers.isEmpty()){ + if(answers.isEmpty()&&Setting.getModuleStep()>=2){ answers.clear(); + //Setting.setLevenstheinMin(0.65); + //Setting.setAnzahlAbgeschickterQueries(10); answers.addAll(doStart(myindex, wordnet, lemmatiser, template_list,"LEVENSTHEIN","neu")); if(wait)DebugMode.waitForButton(); } @@ -147,19 +156,40 @@ * still no answer, start IterationMode with Wordnet */ - if(answers.isEmpty()){ + if(answers.isEmpty()&&Setting.getModuleStep()>=3){ answers.clear(); + //Setting.setAnzahlAbgeschickterQueries(10); answers.addAll(doStart(myindex, wordnet, lemmatiser, template_list,"WORDNET","neu")); if(wait)DebugMode.waitForButton(); } + if(answers.isEmpty()&&Setting.getModuleStep()>=4){ + + answers.clear(); + //Setting.setAnzahlAbgeschickterQueries(10); + //Setting.setThresholdSelect(0.2); + answers.addAll(doStart(myindex, wordnet, lemmatiser, template_list,"RELATE","neu")); + if(wait)DebugMode.waitForButton(); + } + + /*if(answers.isEmpty()){ + + answers.clear(); + Setting.setLevenstheinMin(0.25); + Setting.setAnzahlAbgeschickterQueries(20); + answers.addAll(doStart(myindex, wordnet, lemmatiser, template_list,"SPECIAL","neu")); + if(wait)DebugMode.waitForButton(); + }*/ + - if(answers.isEmpty()){ + + + /*if(answers.isEmpty()){ System.out.println(""); //answers.add("No answers were found with the three Modules"); - } + }*/ /* @@ -185,8 +215,14 @@ StanfordLemmatizer lemmatiser, ArrayList<Template> template_list, String type, String test) { ArrayList<String> answers = new ArrayList<String>(); ArrayList<QueryPair> qp = new ArrayList<QueryPair>(); + boolean special=false; int anzahl; boolean go_on; + if(type.contains("SPECIAL")){ + type ="LEVENSTHEIN"; + special=true; + } + System.out.println("No answer from direkt match, start "+type+"Modul"); for(Template t : template_list){ try{ @@ -194,11 +230,14 @@ ArrayList<ArrayList<Hypothesis>> hypothesenSetList = IterationModule.doIteration(t.getElm(),t.getHypothesen(),t.getCondition(),type,myindex,wordnet,lemmatiser); if(type.contains("WORDNET"))t.setHypothesenWordnet(hypothesenSetList); if(type.contains("LEVENSTHEIN"))t.setHypothesenLevensthein(hypothesenSetList); + if(type.contains("RELATE"))t.setHypothesenRelate(hypothesenSetList); } if(test.contains("neu")){ System.err.println("IN NEU!!!!!"); ArrayList<ArrayList<Hypothesis>> hypothesenSetList = new ArrayList<ArrayList<Hypothesis>>(); + + for(ArrayList<Hypothesis> l_h : t.getHypothesen()){ ArrayList<ArrayList<Hypothesis>> generated_hypothesis = new ArrayList<ArrayList<Hypothesis>>(); generated_hypothesis= IterationModule.new_iteration(t.getElm(),l_h,t.getCondition(),type,myindex,wordnet,lemmatiser); @@ -214,6 +253,8 @@ } if(type.contains("WORDNET"))t.setHypothesenWordnet(hypothesenSetList); if(type.contains("LEVENSTHEIN"))t.setHypothesenLevensthein(hypothesenSetList); + if(type.contains("RELATE"))t.setHypothesenRelate(hypothesenSetList); + } } @@ -247,13 +288,23 @@ //sort QueryPairs qp=LinearSort.doSort(qp); + printQueries(qp, type, Question); + /* + * Only for test! 
+ */ + qp=HeuristicSort.doSort(qp, Question); - printQueries(qp, type, Question); + System.out.println("Following Querries were created:"); + for(QueryPair z : qp){ + System.out.println(z.getQuery()+" "+z.getRank()); + } + if(Setting.isDebugModus())printQueries(qp, type, Question); + //printQueries(qp, type, Question); anzahl=1; go_on = true; int id=0; for(QueryPair q : qp){ - if(anzahl<anzahlAbgeschickterQueries&go_on){ + if(q.getRank()>Setting.getThresholdSelect()&go_on &!q.getQuery().contains("ASK")){ ArrayList<String> answer_tmp = new ArrayList<String>(); answer_tmp=ServerUtil.requestAnswerFromServer(q.getQuery()); System.out.println("Sending Query to Server: "+q.getQuery()); @@ -263,8 +314,10 @@ //else go_on=false; //go_on=true; go_on=false; + if(special) go_on=true; System.out.println("Got Answer from Server with this Query: "+ q.getQuery()); if(qp.size()>(id+1)){ + //&&anzahl<2 if(q.getRank()==qp.get(id+1).getRank()){ go_on=true; } @@ -278,6 +331,12 @@ break; } } + /*System.out.println("\n Answer from Server Befor check answer: \n"); + for(String answer:answer_tmp){ + System.out.println(answer); + }*/ + + for(String s : answer_tmp){ if(checkAnswer(s)){ boolean double_result = false; @@ -287,6 +346,57 @@ //TODO Test!!!!!! //if in one answer is an http, only add uri's if(!double_result){ + if (Question.toLowerCase().contains("who")){ + if(!s.contains("http"))answers.add(s); + } + else if(contains_uri){ + if(s.contains("http"))answers.add(s); + } + else answers.add(s); + } + } + } + //if(checkAnswer(answer_tmp))answers.addAll(answer_tmp); + } + } + + else if(q.getRank()>Setting.getThresholdAsk()&go_on &q.getQuery().contains("ASK")){ + ArrayList<String> answer_tmp = new ArrayList<String>(); + answer_tmp=ServerUtil.requestAnswerFromServer(q.getQuery()); + System.out.println("Sending Query to Server: "+q.getQuery()); + if(answer_tmp.isEmpty()) go_on=true; + + else{ + //else go_on=false; + //go_on=true; + go_on=false; + if(special) go_on=true; + System.out.println("Got Answer from Server with this Query: "+ q.getQuery()); + if(qp.size()>(id+1)){ + if(q.getRank()==qp.get(id+1).getRank()){ + go_on=true; + } + } + + + boolean contains_uri=false; + for(String s : answer_tmp){ + if(s.contains("http")){ + contains_uri=true; + break; + } + } + + + for(String s : answer_tmp){ + if(checkAnswer(s)){ + boolean double_result = false; + for(String s_t : answers){ + if(s_t.contains(s)) double_result=true; + } + //TODO Test!!!!!! 
+ //if in one answer is an http, only add uri's + if(!double_result){ if(contains_uri){ if(s.contains("http"))answers.add(s); } @@ -300,6 +410,15 @@ anzahl+=1; id+=1; } + /* + * here Filter answer + */ + /*System.out.println("\n Answer from Server: \n"); + for(String answer:answers){ + System.out.println(answer); + }*/ + //System.out.println("FILTER NOW!!"); + answers=filterAnswer(answers,Question); System.out.println("\n Answer from Server: \n"); for(String answer:answers){ System.out.println(answer); @@ -310,7 +429,31 @@ - + private static ArrayList<String> filterAnswer(ArrayList<String> answers, String Question){ + if(Question.toLowerCase().contains("who")){ + boolean contains_only_uri=true; + for(String s: answers){ + if(!s.contains("http")) contains_only_uri=false; + } + if(contains_only_uri==false){ + ArrayList<String> new_answer= new ArrayList<String>(); + for(String s: answers){ + if(!s.contains("http")) { + System.out.println("s :"+s); + new_answer.add(s); + } + } + + return new_answer; + } + else{ + return answers; + } + } + + + return answers; + } private static boolean checkAnswer(String answer){ if(answer.contains("File:")||answer.contains(".png")||answer.contains("upload.wikimedia.org")||answer.contains("dbpedia.org/datatype/")||answer.contains("http://www.w3.org/2001/XMLSchema")||answer.contains("flickerwrappr/photos/")) return false; else return true; @@ -324,7 +467,7 @@ } private static void printQueries(ArrayList<QueryPair> qp, String type, String Question){ - String dateiname="/home/swalter/Dokumente/Auswertung/CreatedQueryList.txt"; + String dateiname="/home/swalter/Dokumente/Auswertung/CreatedQueryListNLD"+Setting.getLevenstheinMin()+".txt"; String result_string =""; //Open the file for reading try { Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/Setting.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/Setting.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/Setting.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -5,7 +5,14 @@ private static boolean waitModus; private static boolean debugModus; private static boolean newIndex; + private static double LevenstheinMin; + private static int anzahlAbgeschickterQueries; + private static double thresholdSelect; + private static double thresholdAsk; + private static int moduleStep; + + public static boolean isWaitModus() { return waitModus; } @@ -24,6 +31,37 @@ public static void setNewIndex(boolean newIndex) { Setting.newIndex = newIndex; } + public static double getLevenstheinMin() { + return LevenstheinMin; + } + public static void setLevenstheinMin(double levenstheinMin) { + LevenstheinMin = levenstheinMin; + } + public static int getAnzahlAbgeschickterQueries() { + return anzahlAbgeschickterQueries; + } + public static void setAnzahlAbgeschickterQueries( + int anzahlAbgeschickterQueries) { + Setting.anzahlAbgeschickterQueries = anzahlAbgeschickterQueries; + } + public static double getThresholdSelect() { + return thresholdSelect; + } + public static void setThresholdSelect(double thresholdSelect) { + Setting.thresholdSelect = thresholdSelect; + } + public static double getThresholdAsk() { + return thresholdAsk; + } + public static void setThresholdAsk(double thresholdAsk) { + Setting.thresholdAsk = thresholdAsk; + } + public static int getModuleStep() { + return 
moduleStep; + } + public static void setModuleStep(int moduleStep) { + Setting.moduleStep = moduleStep; + } } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -70,6 +70,16 @@ Setting.setWaitModus(false); Setting.setDebugModus(false); Setting.setNewIndex(false); + Setting.setLevenstheinMin(0.95); + Setting.setAnzahlAbgeschickterQueries(10); + Setting.setThresholdAsk(0.9); + Setting.setThresholdSelect(0.5); + /* + * 1= only "Normal" + * 2= "Normal" + Levensthein + * 3= Normal+Levensthein+Wordnet + */ + Setting.setModuleStep(2); @@ -133,7 +143,14 @@ if(line.contains(":xml")&& schleife==true){ TimeZone.setDefault(TimeZone.getTimeZone("GMT")); - + for(int i = 0; i<1;i++){ + double min = 0.95; + min+=(i*0.05); + + //Setting.setLevenstheinMin(min); + Setting.setLevenstheinMin(0.95); + + /*System.out.println("Please enter Path of xml File:"); line=in.readLine();*/ //line="/home/swalter/Dokumente/Auswertung/XMLDateien/dbpedia-train.xml"; @@ -142,10 +159,13 @@ //line="/home/swalter/Dokumente/Auswertung/XMLDateien/berlin.xml"; //line="/home/swalter/Dokumente/Auswertung/XMLDateien/vortragfragen.xml"; //line="/home/swalter/Dokumente/Auswertung/XMLDateien/iteration-test.xml"; - line="/home/swalter/Dokumente/Auswertung/XMLDateien/dbpedia-train-tagged.xml"; + //line="/home/swalter/Dokumente/Auswertung/XMLDateien/dbpedia-train-tagged.xml"; //line="/home/swalter/Dokumente/Auswertung/XMLDateien/dbpedia-train-tagged-withoutNotparsed.xml"; //line="/home/swalter/Dokumente/Auswertung/XMLDateien/dbpedia-test-questions.xml"; + //line="/home/swalter/Dokumente/Auswertung/XMLDateien/dbpedia-train-tagged-onlyWithWorking.xml"; + line="/home/swalter/Dokumente/Auswertung/XMLDateien/dbpedia-train-tagged-new.xml"; + //create Structs ArrayList<queryInformation> list_of_structs = new ArrayList<queryInformation>(); @@ -160,21 +180,11 @@ anzahl=anzahl+1; System.out.println(""); if(qi.getId()==""||qi.getId()==null)System.out.println("NO"); - /*System.out.println("ID: "+qi.getId()); - System.out.println("Query: "+qi.getQuery()); - System.out.println("Type: "+qi.getType()); - System.out.println("XMLType: "+qi.getXMLtype());*/ String question = qi.getQuery(); ArrayList<String> answers=MainInterface.startQuestioning(question,btemplator,myindex,wordnet,lemmatiser); qi.setResult(answers); } - - /* //Print to Console - System.out.println("\n#############\n Result:"); - for(queryInformation s : list_of_resultstructs){ - System.out.println(s.getResult()); - }*/ long stopTime = System.currentTimeMillis(); System.out.println("For "+anzahl+" Questions the QA_System took "+ ((stopTime-startTime)/1000)+"sek"); @@ -183,7 +193,7 @@ systemid=createXML(list_of_structs); String filename_for_evaluation="/home/swalter/Dokumente/Auswertung/ResultXml/result"+systemid.replace(" ", "_")+".xml"; String execute = "python /home/swalter/Dokumente/Auswertung/Evaluation/Evaluation.py "+filename_for_evaluation+" 0"; - + System.out.println(filename_for_evaluation); /* * First only for training */ @@ -192,9 +202,13 @@ Runtime r = Runtime.getRuntime(); Process p = r.exec(execute); - String 
open_file="/home/swalter/Dokumente/Auswertung/Evaluation/upload/out"+systemid.replace(" ", "_")+".html"; + /* String open_file="/home/swalter/Dokumente/Auswertung/Evaluation/upload/out"+systemid.replace(" ", "_")+".html"; execute ="firefox "+ open_file; - p = r.exec(execute); + p = r.exec(execute);*/ + } + schleife=false; + System.out.println("Bye!"); + System.exit(0); } @@ -293,9 +307,12 @@ String xmlDocument=""; int counter=0; System.out.println("Anzahl queryInformations: "+list.size()); + int anzahl = 0; for (queryInformation s : list){ //why doing this? try that it doesnt matter if there is an answer or not.... - //if(!s.getResult().isEmpty()){ + anzahl+=1; + System.out.println("Number "+anzahl); + if(!s.getResult().isEmpty()){ String tmp; if(counter==0){ counter=counter+1; @@ -315,18 +332,18 @@ else if (i.contains("true")||i.contains("false")) input="<boolean>"+i+"</boolean>\n"; else if(i.matches("[0-9]*"))input="<number>"+i+"</number>\n"; else if(i.matches("[0-9]*-[0-9][0-9]-[0-9]*"))input="<date>"+i+"</date>\n"; - else input="<string>"+i+"</string>\n"; + else if(i.length()>=1 && !i.equals(" "))input="<string>"+i+"</string>\n"; tmp+="<answer>"+input+"</answer>\n"; } tmp+="</answers></question>\n"; xmlDocument+=tmp; - //} + } } xmlDocument+="</dataset>"; File file; FileWriter writer; - file = new File("/home/swalter/Dokumente/Auswertung/ResultXml/result"+systemid.replace(" ", "_")+".xml"); + file = new File("/home/swalter/Dokumente/Auswertung/ResultXml/result"+systemid.replace(" ", "_")+"NLD"+Setting.getLevenstheinMin()+".xml"); try { writer = new FileWriter(file ,true); writer.write(xmlDocument); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -156,6 +156,7 @@ * Here start levenstehin, wordnet etc etc */ if(type.contains("LEVENSTHEIN"))resultHypothesenList= LevenstheinModule.doLevensthein(propertyVariable,array[0],el.getHm()); + if(type.contains("RELATE"))resultHypothesenList= SemanticRelatenes.doSemanticRelatenes(propertyVariable,array[0],el.getHm()); if(type.contains("WORDNET"))resultHypothesenList= WordnetModule.doWordnet(propertyVariable,array[0],el.getHm(),myindex,wordnet,lemmatiser); if(!PL.contains(propertyVariable)) PL.add(propertyVariable+"::"+h.getVariable()); } catch (SQLException e) { @@ -217,6 +218,7 @@ * Here start levenstehin, wordnet etc etc */ if(type.contains("LEVENSTHEIN"))resultHypothesenList= LevenstheinModule.doLevensthein(propertyVariable,h_small.getName(),el.getHm()); + if(type.contains("RELATE"))resultHypothesenList= SemanticRelatenes.doSemanticRelatenes(propertyVariable,h_small.getName(),el.getHm()); if(type.contains("WORDNET"))resultHypothesenList= WordnetModule.doWordnet(propertyVariable,h_small.getName(),el.getHm(),myindex,wordnet,lemmatiser); if(!PL.contains(propertyVariable)) PL.add(propertyVariable); for(Hypothesis h_temp : resultHypothesenList) HL.add(h_temp); @@ -458,6 +460,7 @@ */ if(type.contains("LEVENSTHEIN"))resultHypothesenList= LevenstheinModule.doLevensthein(condition.get(1),property_name,el.getHm()); if(type.contains("WORDNET"))resultHypothesenList= 
WordnetModule.doWordnet(condition.get(1),property_name,el.getHm(),myindex,wordnet,lemmatiser); + if(type.contains("RELATE"))resultHypothesenList= SemanticRelatenes.doSemanticRelatenes(condition.get(1),property_name,el.getHm()); for(Hypothesis h_temp : resultHypothesenList) { ArrayList<Hypothesis> temp_al = new ArrayList<Hypothesis>(); temp_al.add(h); @@ -577,14 +580,14 @@ for(ElementList el : resources){ //System.out.println("el.getVariablename(): "+el.getVariablename()); if(el.getVariablename().contains(class_h.getName())){ - System.out.println("In If Abfrage bei der Iteration ueber el"); + //System.out.println("In If Abfrage bei der Iteration ueber el"); String property_name=""; String property_variable=""; if(condition1_exists_isa)property_variable= condition2.get(1); if(condition2_exists_isa)property_variable= condition1.get(1); - System.out.println("property_variable: " + property_variable); + //System.out.println("property_variable: " + property_variable); for(Hypothesis h_t : givenHypothesenList){ if(h_t.getVariable().contains(property_variable)){ @@ -592,15 +595,17 @@ break; } } - System.out.println("property_name: " + property_name); + //System.out.println("property_name: " + property_name); ArrayList<Hypothesis> resultHypothesenList=new ArrayList<Hypothesis>(); - for (Entry<String, String> entry : el.getHm().entrySet()) { + /*for (Entry<String, String> entry : el.getHm().entrySet()) { System.out.println(entry.getKey()+" "+entry.getValue()); - } + }*/ /* * Here start levenstehin, wordnet etc etc */ if(type.contains("LEVENSTHEIN"))resultHypothesenList= LevenstheinModule.doLevensthein(property_variable,property_name,el.getHm()); + if(type.contains("RELATE"))resultHypothesenList= SemanticRelatenes.doSemanticRelatenes(property_variable,property_name,el.getHm()); + if(type.contains("WORDNET"))resultHypothesenList= WordnetModule.doWordnet(property_variable,property_name,el.getHm(),myindex,wordnet,lemmatiser); System.out.println("After generating new Hypothesen.\n "+resultHypothesenList.size()+" new were generated"); for(Hypothesis h_temp : resultHypothesenList) { @@ -687,10 +692,10 @@ for(ElementList el : resources){ //System.out.println("el.getVariablename(): "+el.getVariablename()); if(el.getVariablename().contains(resource_h.getName())&&el.getVariablename().contains(property_Side)){ - System.out.println("In If Abfrage bei der Iteration ueber el"); + //System.out.println("In If Abfrage bei der Iteration ueber el"); - System.out.println("property_name: " + property_name); + //System.out.println("property_name: " + property_name); ArrayList<Hypothesis> resultHypothesenList=new ArrayList<Hypothesis>(); /*for (Entry<String, String> entry : el.getHm().entrySet()) { System.out.println(entry.getKey()+" "+entry.getValue()); @@ -707,6 +712,8 @@ * Here start levenstehin, wordnet etc etc */ if(type.contains("LEVENSTHEIN"))resultHypothesenList= LevenstheinModule.doLevensthein(property_variable,property_name,el.getHm()); + if(type.contains("RELATE"))resultHypothesenList= SemanticRelatenes.doSemanticRelatenes(property_variable,property_name,el.getHm()); + if(type.contains("WORDNET"))resultHypothesenList= WordnetModule.doWordnet(property_variable,property_name,el.getHm(),myindex,wordnet,lemmatiser); System.out.println("After generating new Hypothesen.\n "+resultHypothesenList.size()+" new were generated"); for(Hypothesis h_temp : resultHypothesenList) { @@ -731,6 +738,8 @@ if(type.contains("LEVENSTHEIN"))second_resultHypothesenList= 
LevenstheinModule.doLevensthein(second_property_variable,second_property_name,hm_newClasses); + if(type.contains("RELATE"))second_resultHypothesenList= SemanticRelatenes.doSemanticRelatenes(second_property_variable,second_property_name,hm_newClasses); + if(type.contains("WORDNET"))second_resultHypothesenList= WordnetModule.doWordnet(second_property_variable,second_property_name,hm_newClasses,myindex,wordnet,lemmatiser); System.out.println("SIze of second_resultHypothesenList: "+second_resultHypothesenList.size()); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/LevenstheinModule.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/LevenstheinModule.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/LevenstheinModule.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -7,9 +7,10 @@ import org.dllearner.algorithm.tbsl.exploration.Sparql.Hypothesis; import org.dllearner.algorithm.tbsl.exploration.Utils.Levenshtein; +import org.dllearner.algorithm.tbsl.exploration.exploration_main.Setting; public class LevenstheinModule { - private final static double LevenstheinMin=0.65; + //private final static double LevenstheinMin=0.65; public static ArrayList<Hypothesis> doLevensthein(String variable, String property_to_compare_with, HashMap<String, String> properties) throws SQLException { @@ -23,6 +24,7 @@ key=key.replace("@en",""); key=key.toLowerCase(); String value = entry.getValue(); + // System.out.println("Key: "+key); ArrayList<String> property_array=new ArrayList<String>(); property_array.add(property_to_compare_with); @@ -32,30 +34,48 @@ for(String s : array_temp) property_array.add(s); } for(String compare_property :property_array ){ - + // System.out.println("compare_property: "+compare_property); double nld=Levenshtein.nld(compare_property.toLowerCase(), key); //if(nld>=LevenstheinMin||key.contains(lemmatiser.stem(property_to_compare_with))||property_to_compare_with.contains(lemmatiser.stem(key))){ - if(key.contains(compare_property)||compare_property.contains(key)){ - if(nld<0.8){ - Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", 0.85); - listOfNewHypothesen.add(h); - } - else{ - Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", nld); - listOfNewHypothesen.add(h); - } + if((key.contains(compare_property)||compare_property.contains(key))){ + double score=0; + if(compare_property.length()>key.length()){ + score = 0.8+(key.length()/compare_property.length()); + } + else{ + score=0.8+(compare_property.length()/key.length()); + } + + + if(compare_property.length()>4&&key.length()>4) { + //0.95 + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", score); + listOfNewHypothesen.add(h); + } + else{ + //0.7 + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", score-0.2); + listOfNewHypothesen.add(h); + } + + + // Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", (key.length()/compare_property.length())); + // listOfNewHypothesen.add(h); + } - else if(key.substring(key.length()-1).contains("s")){ - String neuer_string = key.substring(0, key.length() -1); - if(neuer_string.contains(compare_property)||compare_property.contains(neuer_string)){ - Hypothesis h = new Hypothesis(variable, neuer_string, value, "PROPERTY", 1.5); + else if(compare_property.substring(compare_property.length()-2).contains("ed")){ + String 
compare_property_neu = compare_property.substring(0, compare_property.length() -2); + System.out.println("NEW compare_property: "+compare_property_neu); + if(key.contains(compare_property_neu)||compare_property_neu.contains(key)){ + + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", 0.95); listOfNewHypothesen.add(h); } } - else if(nld>=LevenstheinMin){ + else if(nld>=Setting.getLevenstheinMin()){ Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", nld); listOfNewHypothesen.add(h); } Added: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/SemanticRelatenes.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/SemanticRelatenes.java (rev 0) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/SemanticRelatenes.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -0,0 +1,63 @@ +package org.dllearner.algorithm.tbsl.exploration.modules; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map.Entry; + +import org.dllearner.algorithm.tbsl.exploration.Sparql.Hypothesis; + + + +public class SemanticRelatenes { + //private final static double LevenstheinMin=0.65; + + public static ArrayList<Hypothesis> doSemanticRelatenes(String variable, String property_to_compare_with, HashMap<String, String> properties) + throws SQLException { + ArrayList<Hypothesis> listOfNewHypothesen= new ArrayList<Hypothesis>(); + + + //iterate over properties + for (Entry<String, String> entry : properties.entrySet()) { + String key = entry.getKey(); + key=key.replace("\"",""); + key=key.replace("@en",""); + key=key.toLowerCase(); + String value = entry.getValue(); + + ArrayList<String> property_array=new ArrayList<String>(); + property_array.add(property_to_compare_with); + if(property_to_compare_with.contains(" ")){ + + String[] array_temp = property_to_compare_with.split(" "); + for(String s : array_temp) property_array.add(s); + } + for(String compare_property :property_array ){ + + + + double score=0; + try { + //score = CallSemRelatNess.returnSemRelat(key, compare_property); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + if(score>0){ + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", score); + listOfNewHypothesen.add(h); + } + + + } + //compare property gotten from the resource with the property from the original query + + + } + + + + return listOfNewHypothesen; + } + + } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/WordnetModule.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/WordnetModule.java 2012-04-29 12:31:22 UTC (rev 3664) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/WordnetModule.java 2012-04-29 15:58:36 UTC (rev 3665) @@ -20,7 +20,7 @@ public class WordnetModule { - private static int explorationdepthwordnet =2; + private static int explorationdepthwordnet =1; public static ArrayList<Hypothesis> doWordnet(String variable, String property_to_compare_with, HashMap<String, String> properties, SQLiteIndex myindex,WordNet wordnet,StanfordLemmatizer lemmatiser) throws SQLException, JWNLException { @@ -100,8 +100,61 @@ */ 
//if(key.contains(b.toLowerCase())||key.contains(lemmatiser.stem(b.toLowerCase()))||b.toLowerCase().contains(lemmatiser.stem(key))){ //System.out.println("B: "+b +" Key: "+key); - if(key.contains(b.toLowerCase())||b.toLowerCase().contains(key)){ + if(key.equals(b)){ + //System.out.println("EQUALS"); + //System.out.println("B: " +b); + //System.out.println("key: " +key); + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", 1.0); + + + listOfNewHypothesen.add(h); + + } + else if(key.contains(b.toLowerCase())||b.toLowerCase().contains(key)){ + + //System.out.println("B: " +b); + //System.out.println("key: " +key); + + /*if(b.length()>key.length()) { + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", (key.length()/b.length())); + listOfNewHypothesen.add(h); + } + else{ + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", (b.length()/key.length())); + listOfNewHypothesen.add(h); + }*/ + if(b.length()>4&&key.length()>4) { + double score=0; + if(b.length()>key.length()){ + score = 0.8+(key.length()/b.length()); + } + else{ + score=0.8+(b.length()/key.length()); + } + //0.95 + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", score); + listOfNewHypothesen.add(h); + } + else{ + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", 0.7); + listOfNewHypothesen.add(h); + } + + + + + + + } + + else if(Levenshtein.nld(key.toLowerCase(), b.toLowerCase())>Setting.getLevenstheinMin()){ + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", (Levenshtein.nld(key.toLowerCase(), b.toLowerCase()))); + + + listOfNewHypothesen.add(h); + } + /*System.out.println("Found: "+b); if(Setting.isWaitModus()) try { @@ -110,11 +163,11 @@ // TODO Auto-generated catch block e.printStackTrace(); }*/ - if(!result_SemanticsMatchProperties.contains(key)){ + /*if(!result_SemanticsMatchProperties.contains(key)){ result_SemanticsMatchProperties.add(key); if(key.toLowerCase().contains(property_to_compare_with.toLowerCase())||property_to_compare_with.toLowerCase().contains(key)){ - System.out.println("Variable: "+ variable+" key: "+key+" value : "+value); - Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", 1.5); + //System.out.println("Variable: "+ variable+" key: "+key+" value : "+value); + Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", (key.length()/property_to_compare_with.length())); listOfNewHypothesen.add(h); try { if(Setting.isWaitModus())DebugMode.waitForButton(); @@ -127,7 +180,7 @@ double nld=Levenshtein.nld(property_to_compare_with.toLowerCase(), key); Hypothesis h = new Hypothesis(variable, key, value, "PROPERTY", nld); listOfNewHypothesen.add(h); - System.out.println("Found for key: "+key); + //System.out.println("Found for key: "+key); try { if(Setting.isWaitModus())DebugMode.waitForButton(); } catch (IOException e) { @@ -136,8 +189,8 @@ } } - } - } + }*/ + //} } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
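The property-matching changes in this revision hinge on a normalized Levenshtein score (Levenshtein.nld) being compared against the configurable Setting.getLevenstheinMin() threshold. The project's Levenshtein utility itself is not part of this diff, so the following is only a minimal sketch of such a normalized edit-distance check; the class name, the example strings and the 0.95 threshold (taken from the defaults set in exploration_main above) are illustrative assumptions, not DL-Learner code.

public class LevenshteinSketch {

    // classic dynamic-programming edit distance
    static int distance(String a, String b) {
        int[][] d = new int[a.length() + 1][b.length() + 1];
        for (int i = 0; i <= a.length(); i++) d[i][0] = i;
        for (int j = 0; j <= b.length(); j++) d[0][j] = j;
        for (int i = 1; i <= a.length(); i++) {
            for (int j = 1; j <= b.length(); j++) {
                int cost = a.charAt(i - 1) == b.charAt(j - 1) ? 0 : 1;
                d[i][j] = Math.min(Math.min(d[i - 1][j] + 1, d[i][j - 1] + 1),
                                   d[i - 1][j - 1] + cost);
            }
        }
        return d[a.length()][b.length()];
    }

    // normalized similarity in [0,1]; 1.0 means the strings are identical
    static double nld(String a, String b) {
        int max = Math.max(a.length(), b.length());
        return max == 0 ? 1.0 : 1.0 - (double) distance(a, b) / max;
    }

    public static void main(String[] args) {
        double levenstheinMin = 0.95;        // mirrors Setting.setLevenstheinMin(0.95) above
        String key = "designer";             // label of a candidate property
        String compareProperty = "designed"; // word taken from the natural-language question
        double score = nld(key, compareProperty);
        System.out.println(score + " -> accepted: " + (score >= levenstheinMin));
    }
}

One detail worth double-checking in the committed LevenstheinModule and WordnetModule: expressions such as 0.8+(key.length()/compare_property.length()) divide two int values, so the quotient is truncated to 0 whenever the lengths differ (and to 1 when they are equal) before being added to 0.8; if the length ratio is meant to contribute to the score, a cast to double is presumably intended.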
From: <ku...@us...> - 2012-04-29 12:31:28
Revision: 3664 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3664&view=rev Author: kurzum Date: 2012-04-29 12:31:22 +0000 (Sun, 29 Apr 2012) Log Message: ----------- error log improvements Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java trunk/interfaces/src/main/java/org/dllearner/server/Rest.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-04-28 17:27:17 UTC (rev 3663) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-04-29 12:31:22 UTC (rev 3664) @@ -6,6 +6,7 @@ import java.util.List; import java.util.Set; +import com.hp.hpl.jena.ontology.OntModelSpec; import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.KnowledgeSource; @@ -57,7 +58,7 @@ private static Logger log = LoggerFactory.getLogger(SparqlSimpleExtractor.class); public SparqlSimpleExtractor() { - model = ModelFactory.createOntologyModel(); + model = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM); } /** Modified: trunk/interfaces/src/main/java/org/dllearner/server/Rest.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2012-04-28 17:27:17 UTC (rev 3663) +++ trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2012-04-29 12:31:22 UTC (rev 3664) @@ -1,39 +1,31 @@ package org.dllearner.server; -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - import org.apache.commons.lang.exception.ExceptionUtils; import org.dllearner.configuration.IConfiguration; import org.dllearner.configuration.spring.ApplicationContextBuilder; import org.dllearner.configuration.spring.DefaultApplicationContextBuilder; import org.dllearner.confparser3.ConfParserConfiguration; import org.dllearner.core.ClassExpressionLearningAlgorithm; -import org.dllearner.core.EvaluatedDescription; import org.dllearner.core.LearningAlgorithm; import org.dllearner.kb.sparql.SparqlQueryDescriptionConvertVisitor; import org.dllearner.learningproblems.EvaluatedDescriptionPosNeg; import org.json.simple.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.core.io.InputStreamResource; import org.springframework.core.io.Resource; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + public class Rest extends HttpServlet { private static Logger log = LoggerFactory.getLogger(Rest.class); @@ -66,8 +58,8 @@ throw new IllegalArgumentException("Missing parameter: conf is required. 
"); } else { conf = httpServletRequest.getParameter("conf"); - if(isSet("limit" , httpServletRequest)){ - limit = Integer.parseInt(httpServletRequest.getParameter("limit")) ; + if (isSet("limit", httpServletRequest)) { + limit = Integer.parseInt(httpServletRequest.getParameter("limit")); } } @@ -108,17 +100,17 @@ } catch (IllegalArgumentException e) { String msg = e.getMessage();// + printParameterMap(httpServletRequest); - log.error(msg); + log.error("", ExceptionUtils.getRootCause(e)); learningResult.put("success", "0"); learningResult.put("error", msg); - learningResult.put("stacktrace", ExceptionUtils.getFullStackTrace(e)); + learningResult.put("stacktrace", ExceptionUtils.getRootCause(e)); } catch (Exception e) { String msg = "An error occured: " + e.getMessage(); //+ printParameterMap(httpServletRequest); - log.error(msg, e); + log.error("", ExceptionUtils.getRootCause(e)); learningResult.put("success", "0"); learningResult.put("error", msg); - learningResult.put("stacktrace", ExceptionUtils.getFullStackTrace(e)); + learningResult.put("stacktrace", ExceptionUtils.getRootCause(e)); } result.put("learningresult", learningResult); @@ -136,26 +128,20 @@ * @param conf the content of a conf file * @return */ - public EvaluatedDescriptionPosNeg learn(String conf) { - try { - Resource confFile = new InputStreamResource(new ByteArrayInputStream(conf.getBytes())); - - IConfiguration configuration = new ConfParserConfiguration(confFile); + public EvaluatedDescriptionPosNeg learn(String conf) throws Exception { + Resource confFile = new InputStreamResource(new ByteArrayInputStream(conf.getBytes())); - ApplicationContextBuilder builder = new DefaultApplicationContextBuilder(); - ApplicationContext context = builder.buildApplicationContext(configuration, new ArrayList<Resource>()); - - LearningAlgorithm algorithm = context.getBean(LearningAlgorithm.class); - algorithm.start(); - if(algorithm instanceof ClassExpressionLearningAlgorithm){ - return (EvaluatedDescriptionPosNeg)((ClassExpressionLearningAlgorithm) algorithm).getCurrentlyBestEvaluatedDescriptions(1).iterator().next(); - } - } catch (BeansException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } - return null; + IConfiguration configuration = new ConfParserConfiguration(confFile); + + ApplicationContextBuilder builder = new DefaultApplicationContextBuilder(); + ApplicationContext context = builder.buildApplicationContext(configuration, new ArrayList<Resource>()); + + LearningAlgorithm algorithm = context.getBean(LearningAlgorithm.class); + algorithm.start(); + if (algorithm instanceof ClassExpressionLearningAlgorithm) { + return (EvaluatedDescriptionPosNeg) ((ClassExpressionLearningAlgorithm) algorithm).getCurrentlyBestEvaluatedDescriptions(1).iterator().next(); + } + throw new Exception("only ClassExpressionLearningAlgorithm implemented currently"); } @@ -174,26 +160,29 @@ } return ret; } - - public static void main(String[] args) throws Exception{ - String filePath = "../examples/father.conf"; - byte[] buffer = new byte[(int) new File(filePath).length()]; + + public static void main(String[] args) throws Exception { + String filePath = "../examples/father.conf"; + byte[] buffer = new byte[(int) new File(filePath).length()]; BufferedInputStream f = null; try { f = new BufferedInputStream(new FileInputStream(filePath)); f.read(buffer); } finally { - if (f != null) try { f.close(); } catch (IOException ignored) { } + if (f != null) try { + f.close(); + } catch (IOException ignored) { + } } String 
confString = new String(buffer); - + Resource confFile = new InputStreamResource(new ByteArrayInputStream(confString.getBytes())); - + IConfiguration configuration = new ConfParserConfiguration(confFile); ApplicationContextBuilder builder = new DefaultApplicationContextBuilder(); - ApplicationContext context = builder.buildApplicationContext(configuration, new ArrayList<Resource>()); - + ApplicationContext context = builder.buildApplicationContext(configuration, new ArrayList<Resource>()); + LearningAlgorithm algorithm = context.getBean(LearningAlgorithm.class); algorithm.start(); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
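Two things change here for error reporting in the Rest servlet: learn() now propagates exceptions instead of swallowing them and returning null, and the catch blocks log the root cause extracted with commons-lang rather than the full wrapped stack trace. A minimal, self-contained sketch of that logging pattern (the nested exception below is invented purely for illustration; the servlet and JSON plumbing are omitted):

import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RootCauseLoggingSketch {

    private static final Logger log = LoggerFactory.getLogger(RootCauseLoggingSketch.class);

    public static void main(String[] args) {
        try {
            // stand-in for a failure that arrives wrapped in several layers,
            // e.g. a conf parsing error surfacing through the Spring context builder
            throw new RuntimeException("building application context failed",
                    new IllegalArgumentException("Missing parameter: conf is required."));
        } catch (Exception e) {
            // getRootCause returns the innermost cause, or null if e has no nested cause;
            // the null guard is added in this sketch, the servlet above logs the result directly
            Throwable root = ExceptionUtils.getRootCause(e);
            log.error("", root != null ? root : e);
            // a printable trace is still available if a String is needed
            System.out.println(ExceptionUtils.getFullStackTrace(root != null ? root : e));
        }
    }
}

Note, as an aside, that after this change the "stacktrace" entry of the JSON result is filled with the Throwable returned by getRootCause rather than with a formatted String, so whatever serializes the JSONObject needs to cope with that.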
From: <dc...@us...> - 2012-04-28 17:27:23
Revision: 3663 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3663&view=rev Author: dcherix Date: 2012-04-28 17:27:17 +0000 (Sat, 28 Apr 2012) Log Message: ----------- Add the use og the TypeOntology class Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-04-28 17:26:12 UTC (rev 3662) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-04-28 17:27:17 UTC (rev 3663) @@ -1,21 +1,32 @@ package org.dllearner.kb.sparql.simple; -import com.hp.hpl.jena.ontology.OntClass; -import com.hp.hpl.jena.ontology.OntModel; -import com.hp.hpl.jena.query.QueryParseException; -import com.hp.hpl.jena.rdf.model.*; -import com.jamonapi.Monitor; -import com.jamonapi.MonitorFactory; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.KnowledgeSource; import org.dllearner.core.config.ConfigOption; import org.dllearner.utilities.JamonMonitorLogger; +import org.dllearner.utilities.analyse.TypeOntology; import org.semanticweb.owlapi.model.OWLOntology; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.*; +import com.hp.hpl.jena.ontology.OntClass; +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.NodeIterator; +import com.hp.hpl.jena.rdf.model.RDFNode; +import com.hp.hpl.jena.rdf.model.ResIterator; +import com.hp.hpl.jena.rdf.model.Resource; +import com.hp.hpl.jena.vocabulary.OWL; +import com.jamonapi.Monitor; +import com.jamonapi.MonitorFactory; @ComponentAnn(name = "efficient SPARQL fragment extractor", shortName = "sparqls", version = 0.1) public class SparqlSimpleExtractor implements KnowledgeSource { @@ -118,6 +129,10 @@ throw new ComponentInitException( "An ontology schema description file (ontologyFile) in RDF ist required"); } + + for(String instance:instances){ + model.createIndividual(instance, OWL.Thing); + } Monitor monComp = MonitorFactory.start("Simple SPARQL Component") .start(); Monitor monIndexer = MonitorFactory.start("Schema Indexer").start(); @@ -125,12 +140,15 @@ indexer.setOntologySchemaUrls(ontologySchemaUrls); indexer.init(); monIndexer.stop(); + + TypeOntology typeOntology = new TypeOntology(); Monitor monQueryingABox; QueryExecutor executor = new QueryExecutor(); String queryString; Set<String> instancesSet = new HashSet<String>(instances); Set<String> alreadyQueried = new HashSet<String>(); + Monitor typizeModel; if (sparqlQuery == null) { ABoxQueryGenerator aGenerator = new ABoxQueryGenerator(); for (int i = 0; i < recursionDepth; i++) { @@ -141,12 +159,21 @@ log.info("processing (recursion " + i + ") " + instancesSet.size() + " new instances"); queryString = aGenerator.createQuery(instancesSet, aboxfilter); + System.out.println(queryString); log.debug("SPARQL: {}", queryString); monQueryingABox = MonitorFactory.start("ABox query time"); + try{ executor.executeQuery(queryString, endpointURL, model, defaultGraphURI); + } catch (Throwable t){ + 
t.printStackTrace(); + } monQueryingABox.stop(); + typizeModel=MonitorFactory.start("Typize the model"); + typeOntology.addTypes(model); + typizeModel.stop(); + alreadyQueried.addAll(instancesSet); instancesSet = difference(alreadyQueried, model); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
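Besides the extra logging and the typeOntology.addTypes(model) call after each recursion step, the key addition is that every seed instance is registered up front as an owl:Thing individual, which is what the typing heuristics in TypeOntology appear to rely on to tell individuals apart from classes and properties. A minimal sketch of that seeding step in isolation (the example URIs are arbitrary; schema indexing, monitors and the SPARQL recursion are left out):

import java.util.Arrays;
import java.util.List;

import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.vocabulary.OWL;

public class SeedIndividualsSketch {
    public static void main(String[] args) {
        // plain in-memory OWL model, matching the constructor change in rev 3664 above
        OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM);

        List<String> instances = Arrays.asList(
                "http://dbpedia.org/resource/Socrates",
                "http://dbpedia.org/resource/Plato");

        // mirrors the loop added to SparqlSimpleExtractor.init():
        // each seed URI becomes an owl:Thing individual before any ABox query runs
        for (String instance : instances) {
            model.createIndividual(instance, OWL.Thing);
        }

        System.out.println(model.listIndividuals().toSet());
    }
}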
From: <dc...@us...> - 2012-04-28 17:26:20
Revision: 3662 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3662&view=rev Author: dcherix Date: 2012-04-28 17:26:12 +0000 (Sat, 28 Apr 2012) Log Message: ----------- A new class to add the type to object in a jena model Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java Added: trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java 2012-04-28 17:26:12 UTC (rev 3662) @@ -0,0 +1,128 @@ +package org.dllearner.utilities.analyse; + +import java.util.HashSet; +import java.util.Set; + +import org.dllearner.kb.sparql.simple.QueryExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.hp.hpl.jena.graph.Node; +import com.hp.hpl.jena.graph.Triple; +import com.hp.hpl.jena.ontology.DatatypeProperty; +import com.hp.hpl.jena.ontology.Individual; +import com.hp.hpl.jena.ontology.ObjectProperty; +import com.hp.hpl.jena.ontology.OntClass; +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.util.iterator.ExtendedIterator; +import com.hp.hpl.jena.vocabulary.OWL; +import com.hp.hpl.jena.vocabulary.RDF; + +public class TypeOntology { + + private static Logger log = LoggerFactory.getLogger(TypeOntology.class); + + public void addTypes(OntModel model) { + Set<DatatypeProperty> dataProperties = model.listDatatypeProperties() + .toSet(); + Set<ObjectProperty> objectProperties = model.listObjectProperties() + .toSet(); + Set<OntClass> classes = model.listClasses().toSet(); + Set<Individual> individuals = model.listIndividuals().toSet(); + Set<Triple> triples = model.getGraph().find(Triple.ANY).toSet(); + Node subject; + Node predicate; + Node object; + // while (!triples.isEmpty()) { + ExtendedIterator<Triple> iterator = model.getGraph().find(Triple.ANY); + // System.out.println(triples.size()); + for (Triple triple : triples) { + // System.out.println(triple); + subject = triple.getSubject(); + predicate = triple.getPredicate(); + object = triple.getObject(); + if (individuals.contains(model.getResource(subject.getURI()))) { + log.debug("{}", triple); + if (predicate.hasURI(RDF.type.getURI())) { + if (!classes.contains(model.getResource(object.getURI())) + && !object.getURI().equals(OWL.Thing.getURI())) { + model.getResource(subject.getURI()).addProperty( + com.hp.hpl.jena.vocabulary.RDFS.subClassOf, + OWL.Thing); + classes = model.listClasses().toSet(); + log.debug("{} is a class", object); + } + } else if (object.isLiteral()) { + if (!objectProperties.contains(model.getResource(predicate + .getURI()))) { + model.createDatatypeProperty(predicate.getURI()); + dataProperties = model.listDatatypeProperties().toSet(); + log.debug("{} is a dataproperty", predicate); + } else { + model.createOntProperty(predicate.getURI()); + log.info("{} is a rdf:property", predicate); + } + } else if (!individuals.contains(model.getResource(object + .getURI()))) { + model.getResource(object.getURI()).addProperty(RDF.type, + OWL.Thing); + individuals = model.listIndividuals().toSet(); + if (!dataProperties.contains(model.getResource(predicate + .getURI()))) { + model.createObjectProperty(predicate.getURI()); + objectProperties = model.listObjectProperties().toSet(); + log.debug("{} is an 
objectproperty", predicate); + } else { + model.createOntProperty(predicate.getURI()); + log.info("{} is a rdf:property", predicate); + } + log.debug("{} is an individual", object); + } + + } else if (classes.contains(model.getResource(subject.getURI()))) { + model.getResource(object.getURI()).addProperty( + com.hp.hpl.jena.vocabulary.RDFS.subClassOf, OWL.Thing); + } + } + } + + public static void main(String... args) { + String sparql = "CONSTRUCT {?s ?p ?o}" + + "{ ?s ?p ?o " + + "FILTER (?s IN( <http://dbpedia.org/resource/Philolaus>," + + " <http://dbpedia.org/resource/Zeno_of_Elea>," + + " <http://dbpedia.org/resource/Socrates>," + + " <http://dbpedia.org/resource/Pythagoras>," + + " <http://dbpedia.org/resource/Archytas>," + + " <http://dbpedia.org/resource/Plato>," + + " <http://dbpedia.org/resource/Democritus> )) ." + + " FILTER ( !isLiteral(?o) && regex(str(?o), '^http://dbpedia.org/resource/') &&" + + " ! regex(str(?o), '^http://dbpedia.org/resource/Category') &&" + + " ! regex(str(?o), '^http://dbpedia.org/resource/Template') ) . }"; + OntModel model = ModelFactory.createOntologyModel(); + model.createIndividual("http://dbpedia.org/resource/Philolaus", + OWL.Thing); + model.createIndividual("http://dbpedia.org/resource/Zeno_of_Elea", + OWL.Thing); + model.createIndividual("http://dbpedia.org/resource/Socrates", + OWL.Thing); + model.createIndividual("http://dbpedia.org/resource/Pytagoras", + OWL.Thing); + model.createIndividual("http://dbpedia.org/resource/Archytas", + OWL.Thing); + model.createIndividual("http://dbpedia.org/resource/Plato", OWL.Thing); + model.createIndividual("http://dbpedia.org/resource/Democritus", + OWL.Thing); + QueryExecutor exec = new QueryExecutor(); + exec.executeQuery(sparql, "http://live.dbpedia.org/sparql", model, + "http://dbpedia.org"); + System.out.println(model.listIndividuals().toSet()); + System.out.println(model.listObjectProperties().toSet()); + TypeOntology type = new TypeOntology(); + type.addTypes(model); + System.out.println(model.listIndividuals().toSet()); + System.out.println(model.listObjectProperties().toSet()); + System.out.println(model.listDatatypeProperties().toSet()); + } +} Property changes on: trunk/components-core/src/main/java/org/dllearner/utilities/analyse/TypeOntology.java ___________________________________________________________________ Added: svn:mime-type + text/plain This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
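The new class walks the triples of an otherwise untyped Jena model and adds types heuristically: for statements whose subject is already known as an individual, a literal object turns the predicate into a datatype property, while a resource object is declared an owl:Thing individual and the predicate an object property (rdf:type statements are inspected separately to pick up classes). Its own main method needs a live DBpedia endpoint, so here is a smaller offline usage sketch; the example.org URIs are invented, and the default OntModel spec matches the one used in that main method:

import com.hp.hpl.jena.ontology.Individual;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.vocabulary.OWL;

import org.dllearner.utilities.analyse.TypeOntology;

public class TypeOntologyOfflineSketch {
    public static void main(String[] args) {
        OntModel model = ModelFactory.createOntologyModel();

        // seed individual, as SparqlSimpleExtractor registers its instances
        Individual socrates = model.createIndividual("http://example.org/Socrates", OWL.Thing);

        // untyped statements, as they would come back from a plain CONSTRUCT query
        socrates.addProperty(model.createProperty("http://example.org/birthPlace"),
                model.createResource("http://example.org/Athens"));
        socrates.addProperty(model.createProperty("http://example.org/name"), "Socrates");

        new TypeOntology().addTypes(model);

        System.out.println("individuals:    " + model.listIndividuals().toSet());
        System.out.println("object props:   " + model.listObjectProperties().toSet());
        System.out.println("datatype props: " + model.listDatatypeProperties().toSet());
    }
}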
From: <lor...@us...> - 2012-04-28 14:41:55
Revision: 3661 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3661&view=rev Author: lorenz_b Date: 2012-04-28 14:41:49 +0000 (Sat, 28 Apr 2012) Log Message: ----------- Removed unused method. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java 2012-04-28 14:02:44 UTC (rev 3660) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java 2012-04-28 14:41:49 UTC (rev 3661) @@ -24,7 +24,6 @@ import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.Random; @@ -88,12 +87,8 @@ import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; -import com.hp.hpl.jena.rdf.model.InfModel; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; -import com.hp.hpl.jena.reasoner.Reasoner; -import com.hp.hpl.jena.reasoner.ValidityReport; -import com.hp.hpl.jena.reasoner.ValidityReport.Report; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.RDFS; @@ -304,19 +299,6 @@ return explanations; } - private void computeExplanations(Model model){ - logger.info("Computing explanations..."); - Reasoner reasoner = org.mindswap.pellet.jena.PelletReasonerFactory.theInstance().create(); - InfModel infModel = ModelFactory.createInfModel(reasoner, model); - long startTime = System.currentTimeMillis(); - ValidityReport report = infModel.validate(); - Iterator<Report> i = report.getReports(); - while(i.hasNext()){ - System.out.println(i.next()); - } - logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - } - private OWLOntology loadReferenceOntology() throws OWLOntologyCreationException{ long startTime = System.currentTimeMillis(); logger.info("Loading reference ontology..."); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2012-04-28 14:02:51
Revision: 3660
          http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3660&view=rev
Author:   jenslehmann
Date:     2012-04-28 14:02:44 +0000 (Sat, 28 Apr 2012)

Log Message:
-----------
completed previous commit

Modified Paths:
--------------
    trunk/components-ext/pom.xml
    trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/util/NifExamples.java
    trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/POSTaggerEvaluation.java

Modified: trunk/components-ext/pom.xml
===================================================================
--- trunk/components-ext/pom.xml	2012-04-28 13:50:47 UTC (rev 3659)
+++ trunk/components-ext/pom.xml	2012-04-28 14:02:44 UTC (rev 3660)
@@ -150,11 +150,13 @@
 			<artifactId>jwnl</artifactId>
 			<version>1.4.1.RC2</version>
 		</dependency>
+		<!--
 		<dependency>
 			<groupId>org.nlp2rdf</groupId>
 			<artifactId>nif</artifactId>
 			<version>1.1</version>
 		</dependency>
+		-->
 		<dependency>
 			<groupId>org.xerial</groupId>
 			<artifactId>sqlite-jdbc</artifactId>

Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/util/NifExamples.java
===================================================================
--- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/util/NifExamples.java	2012-04-28 13:50:47 UTC (rev 3659)
+++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/util/NifExamples.java	2012-04-28 14:02:44 UTC (rev 3660)
@@ -4,8 +4,6 @@
 import com.hp.hpl.jena.ontology.OntModel;
 import com.hp.hpl.jena.util.iterator.ExtendedIterator;
 import org.apache.log4j.Logger;
-import org.nlp2rdf.ontology.olia.OLiAManager;
-import org.nlp2rdf.ontology.olia.OLiAOntology;
 
 import java.util.Set;
 
@@ -15,28 +13,30 @@
     private static Logger log = Logger.getLogger(NifExamples.class);
 
     public static void main(String[] args) {
-        OLiAManager m = new OLiAManager();
-        OLiAOntology brown = m.getOLiAOntology("http://purl.org/olia/brown-link.rdf");
-
-        System.out.println(brown);
-        String posTag = "BED";
-        String oliaIndividual = null;
-        if ((oliaIndividual = brown.getIndividualURIForTag(posTag)) != null) {
-            log.info("The OLia Annotation individual can be null, if the ontology has a gap");
-            log.info(oliaIndividual + "");
-        }
-        //adding pos classes from olia and olia-top
-        Set<String> classes = brown.getClassURIsForTag(posTag);
-        log.info("Classes found for the POS tag " + posTag);
-        log.info("" + classes);
-
-        for (String classUri : classes) {
-            log.info("found: " + classUri + " for: " + posTag);
-            OntModel hierarchy = brown.getHierarchy(classUri);
-            for (ExtendedIterator<OntClass> it = hierarchy.listClasses(); it.hasNext(); ) {
-                OntClass oc = it.next();
-                log.info("flattended: " + oc);
-            }
-        }
+        // commented out since the NIF dependency was removed
+
+//        OLiAManager m = new OLiAManager();
+//        OLiAOntology brown = m.getOLiAOntology("http://purl.org/olia/brown-link.rdf");
+//
+//        System.out.println(brown);
+//        String posTag = "BED";
+//        String oliaIndividual = null;
+//        if ((oliaIndividual = brown.getIndividualURIForTag(posTag)) != null) {
+//            log.info("The OLia Annotation individual can be null, if the ontology has a gap");
+//            log.info(oliaIndividual + "");
+//        }
+//        //adding pos classes from olia and olia-top
+//        Set<String> classes = brown.getClassURIsForTag(posTag);
+//        log.info("Classes found for the POS tag " + posTag);
+//        log.info("" + classes);
+//
+//        for (String classUri : classes) {
+//            log.info("found: " + classUri + " for: " + posTag);
+//            OntModel hierarchy = brown.getHierarchy(classUri);
+//            for (ExtendedIterator<OntClass> it = hierarchy.listClasses(); it.hasNext(); ) {
+//                OntClass oc = it.next();
+//                log.info("flattended: " + oc);
+//            }
+//        }
     }
 }

Modified: trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/POSTaggerEvaluation.java
===================================================================
--- trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/POSTaggerEvaluation.java	2012-04-28 13:50:47 UTC (rev 3659)
+++ trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/POSTaggerEvaluation.java	2012-04-28 14:02:44 UTC (rev 3660)
@@ -11,8 +11,6 @@
 import org.dllearner.algorithm.tbsl.nlp.LingPipePartOfSpeechTagger;
 import org.dllearner.algorithm.tbsl.nlp.PartOfSpeechTagger;
 import org.dllearner.algorithm.tbsl.nlp.StanfordPartOfSpeechTagger;
-import org.nlp2rdf.ontology.olia.OLiAManager;
-import org.nlp2rdf.ontology.olia.OLiAOntology;
 
 import com.aliasi.corpus.ObjectHandler;
 import com.aliasi.corpus.StringParser;
@@ -24,8 +22,10 @@
 import com.hp.hpl.jena.util.iterator.ExtendedIterator;
 import com.hp.hpl.jena.util.iterator.Filter;
 
-public class POSTaggerEvaluation extends StringParser<ObjectHandler<Tagging<String>>>{
+// commented out because the NIF dependency was removed
+public class POSTaggerEvaluation { /* extends StringParser<ObjectHandler<Tagging<String>>>{
+
 	private List<PartOfSpeechTagger> taggers = Arrays.asList(new PartOfSpeechTagger[]{
 			new ApachePartOfSpeechTagger(), new StanfordPartOfSpeechTagger(), new LingPipePartOfSpeechTagger()});
 
@@ -44,8 +44,8 @@
 	
 	public POSTaggerEvaluation() {
-		brown = m.getOLiAOntology("http://purl.org/olia/brown-link.rdf");
-		penn = m.getOLiAOntology("http://purl.org/olia/penn-link.rdf");
+//		brown = m.getOLiAOntology("http://purl.org/olia/brown-link.rdf");
+//		penn = m.getOLiAOntology("http://purl.org/olia/penn-link.rdf");
 	}
 	
 	public void run(File directory){
@@ -178,9 +178,9 @@
 		}
 	}
 	
-	/*
-	 * Returns TRUE if in the OLia hierarchy is somewhere a common class.
-	 */
+	//
+	// Returns TRUE if in the OLia hierarchy is somewhere a common class.
+	//
 	private boolean matchesOLiaClass(String brownTag, String pennTag){
 		Set<String> brownClasses = brown.getClassURIsForTag(brownTag.toUpperCase());
 		Set<String> pennClasses = penn.getClassURIsForTag(pennTag);
@@ -215,5 +215,5 @@
 		eval.run(new File(args[0]));
 	}
 	
-	
+*/
 }
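For anyone who wants to revive this example once the org.nlp2rdf:nif artifact (version 1.1, per the pom.xml change above) is back on the classpath, here is a minimal sketch of the commented-out lookup flow. The OLiAManager/OLiAOntology method names and the brown-link.rdf URL are taken from the code above; whether a future version of the artifact keeps the same signatures is an assumption.

import java.util.Set;

import org.nlp2rdf.ontology.olia.OLiAManager;
import org.nlp2rdf.ontology.olia.OLiAOntology;

import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;

public class OliaTagLookupSketch {

    public static void main(String[] args) {
        // Load the OLiA linking model for the Brown tagset (URL as in NifExamples above).
        OLiAManager manager = new OLiAManager();
        OLiAOntology brown = manager.getOLiAOntology("http://purl.org/olia/brown-link.rdf");

        String posTag = "BED"; // a Brown corpus tag

        // The individual can be null if the linking ontology has a gap for this tag.
        String individual = brown.getIndividualURIForTag(posTag);
        if (individual != null) {
            System.out.println("OLiA individual: " + individual);
        }

        // Map the tag to its OLiA/OLiA-top classes and walk each class hierarchy.
        Set<String> classUris = brown.getClassURIsForTag(posTag);
        for (String classUri : classUris) {
            OntModel hierarchy = brown.getHierarchy(classUri);
            for (ExtendedIterator<OntClass> it = hierarchy.listClasses(); it.hasNext();) {
                System.out.println(classUri + " -> " + it.next());
            }
        }
    }
}

The same tag-to-OLiA-class lookup is what POSTaggerEvaluation.matchesOLiaClass relies on when it checks whether a Brown tag and a Penn tag share a common OLiA class.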
From: <jen...@us...> - 2012-04-28 13:50:53
Revision: 3659
          http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3659&view=rev
Author:   jenslehmann
Date:     2012-04-28 13:50:47 +0000 (Sat, 28 Apr 2012)

Log Message:
-----------
removed NIF dependency

Modified Paths:
--------------
    trunk/pom.xml

Modified: trunk/pom.xml
===================================================================
--- trunk/pom.xml	2012-04-27 10:35:29 UTC (rev 3658)
+++ trunk/pom.xml	2012-04-28 13:50:47 UTC (rev 3659)
@@ -161,6 +161,7 @@
 			</exclusions>
 		</dependency>
 
+		<!--
 		<dependency>
 			<groupId>org.nlp2rdf</groupId>
 			<artifactId>nif</artifactId>
@@ -172,6 +173,7 @@
 				</exclusion>
 			</exclusions>
 		</dependency>
+		-->
 
 		<!--Available via central, we use the latest with minor mods to DL Learner source (IE Dig related code) -->
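Commits r3659/r3660 handle the missing artifact by commenting the dependent code out. If the dependency is expected to return, another option (purely a suggestion, not something the DL-Learner code does) is to probe the classpath at runtime, so NIF-backed features are skipped rather than deleted; only the class name below comes from the removed imports, the rest is a hypothetical sketch.

public class NifAvailability {

    // Class name taken from the imports removed in r3660; the guard itself is hypothetical.
    private static final String OLIA_MANAGER = "org.nlp2rdf.ontology.olia.OLiAManager";

    /** Returns true if the optional NIF/OLiA classes are on the classpath. */
    public static boolean nifOnClasspath() {
        try {
            Class.forName(OLIA_MANAGER);
            return true;
        } catch (ClassNotFoundException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(nifOnClasspath()
                ? "NIF/OLiA support available"
                : "NIF/OLiA support missing; skipping OLiA-based features");
    }
}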
From: <lor...@us...> - 2012-04-27 10:35:36
Revision: 3658
          http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3658&view=rev
Author:   lorenz_b
Date:     2012-04-27 10:35:29 +0000 (Fri, 27 Apr 2012)

Log Message:
-----------
Changed base URI of enrichment ontology vocabulary.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/utilities/EnrichmentVocabulary.java

Modified: trunk/components-core/src/main/java/org/dllearner/utilities/EnrichmentVocabulary.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/utilities/EnrichmentVocabulary.java	2012-04-24 07:47:35 UTC (rev 3657)
+++ trunk/components-core/src/main/java/org/dllearner/utilities/EnrichmentVocabulary.java	2012-04-27 10:35:29 UTC (rev 3658)
@@ -33,7 +33,7 @@
 	private static final OWLDataFactory factory = new OWLDataFactoryImpl();
 	
 	//the default namespace
-	public static final String NS = "http://www.dl-learner.org/enrichment.owl#";
+	public static final String NS = "http://www.dl-learner.org/ontologies/enrichment.owl#";
 	
 	//the classes
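Only the base URI changes in this commit; vocabulary terms are still minted by prefixing NS with a term name via the OWLDataFactory visible in the context lines. A minimal sketch of that pattern follows; the term name "Suggestion" is invented for illustration and the OWL API 3.x import path for OWLDataFactoryImpl is assumed, only the NS value comes from the diff.

import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLDataFactory;

import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl;

public class EnrichmentVocabularySketch {

    private static final OWLDataFactory factory = new OWLDataFactoryImpl();

    // New default namespace from r3658 (old value: http://www.dl-learner.org/enrichment.owl#).
    public static final String NS = "http://www.dl-learner.org/ontologies/enrichment.owl#";

    // Hypothetical vocabulary term; the real class constants live in EnrichmentVocabulary.
    public static final OWLClass SUGGESTION = factory.getOWLClass(IRI.create(NS + "Suggestion"));

    public static void main(String[] args) {
        // Every term now resolves under the new /ontologies/ namespace.
        System.out.println(SUGGESTION.getIRI());
    }
}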