From: <jen...@us...> - 2011-01-31 19:37:43
Revision: 2635 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2635&view=rev Author: jenslehmann Date: 2011-01-31 19:37:37 +0000 (Mon, 31 Jan 2011) Log Message: ----------- started unit tests for pos-neg learning measures Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java trunk/components-core/src/test/java/org/dllearner/test/junit/HeuristicTests.java Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java 2011-01-26 18:39:46 UTC (rev 2634) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java 2011-01-31 19:37:37 UTC (rev 2635) @@ -313,18 +313,7 @@ @Override public double getAccuracyOrTooWeak(Description description, double noise) { // delegates to the appropriate methods - return useApproximations ? getAccuracyOrTooWeakApprox(description, noise) : getAccuracyOrTooWeakExact(description, noise); - /* - if(useApproximations) { - if(useFMeasure) { - return getFMeasureOrTooWeakApprox(description, noise); - } else { - throw new Error("approximating pred. acc not implemented"); - } - } else { - return getPredAccuracyOrTooWeakExact(description, noise); - } - */ + return useApproximations ? getAccuracyOrTooWeakApprox(description, noise) : getAccuracyOrTooWeakExact(description, noise); } public double getAccuracyOrTooWeakApprox(Description description, double noise) { @@ -386,6 +375,8 @@ return ret; } else if(heuristic.equals(HeuristicType.FMEASURE)) { + System.out.println("Testing " + description); + // we abort when there are too many uncovered positives int maxNotCovered = (int) Math.ceil(noise*positiveExamples.size()); int instancesCovered = 0; @@ -437,6 +428,8 @@ if(heuristic.equals(HeuristicType.PRED_ACC)) { return getPredAccuracyOrTooWeakExact(description, noise); } else if(heuristic.equals(HeuristicType.FMEASURE)) { + return getFMeasureOrTooWeakExact(description, noise); + /* // computing R(C) restricted to relevant instances int additionalInstances = 0; for(Individual ind : negativeExamples) { @@ -457,6 +450,7 @@ double precision = (additionalInstances + coveredInstances == 0) ? 0 : coveredInstances / (double) (coveredInstances + additionalInstances); return Heuristics.getFScore(recall, precision); + */ } else { throw new Error("Heuristic " + heuristic + " not implemented."); } @@ -518,7 +512,8 @@ double precision = (additionalInstances + coveredInstances == 0) ? 
0 : coveredInstances / (double) (coveredInstances + additionalInstances); - return getFMeasure(recall, precision); +// return getFMeasure(recall, precision); + return Heuristics.getFScore(recall, precision); } // instead of using the standard operation, we use optimisation Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/HeuristicTests.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/HeuristicTests.java 2011-01-26 18:39:46 UTC (rev 2634) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/HeuristicTests.java 2011-01-31 19:37:37 UTC (rev 2635) @@ -23,6 +23,8 @@ import java.net.MalformedURLException; import java.net.URL; +import java.util.Set; +import java.util.TreeSet; import org.dllearner.core.ComponentInitException; import org.dllearner.core.ComponentManager; @@ -37,9 +39,13 @@ import org.dllearner.kb.KBFile; import org.dllearner.learningproblems.ClassLearningProblem; import org.dllearner.learningproblems.Heuristics; +import org.dllearner.learningproblems.PosNegLPStandard; import org.dllearner.reasoning.OWLAPIReasoner; +import org.dllearner.utilities.Helper; import org.junit.Test; +import scala.actors.threadpool.Arrays; + /** * Tests for various heuristics employed in learning problems. * @@ -159,6 +165,60 @@ } @Test + public void posNegLPLearningTests() throws ComponentInitException { + // create artificial ontology + KB kb = new KB(); + String ns = "http://dl-learner.org/junit/"; + NamedClass[] nc = new NamedClass[5]; + for(int i=0; i<5; i++) { + nc[i] = new NamedClass(ns + "A" + i); + } + Individual[] ind = new Individual[100]; + for(int i=0; i<100; i++) { + ind[i] = new Individual(ns + "i" + i); + } + + // assert individuals to owl:Thing (such that they exist in the knowledge base) + for(int i=0; i<100; i++) { + kb.addAxiom(new ClassAssertionAxiom(Thing.instance,ind[i])); + } + + // A0 has 20 instances (i0 to i19) + for(int i=0; i<20; i++) { + kb.addAxiom(new ClassAssertionAxiom(nc[0],ind[i])); + } + + // A1 has 20 instances (i10 to i29) + for(int i=10; i<30; i++) { + kb.addAxiom(new ClassAssertionAxiom(nc[1],ind[i])); + } + + // A2 has 40 instances (i10 to i49) + for(int i=10; i<50; i++) { + kb.addAxiom(new ClassAssertionAxiom(nc[2],ind[i])); + } + + // A3 has 5 instances (i8 to i12) + for(int i=8; i<13; i++) { + kb.addAxiom(new ClassAssertionAxiom(nc[3],ind[i])); + } + + ComponentManager cm = ComponentManager.getInstance(); + KnowledgeSource ks = new KBFile(kb); + ReasonerComponent reasoner = cm.reasoner(OWLAPIReasoner.class, ks); + PosNegLPStandard problem = cm.learningProblem(PosNegLPStandard.class, reasoner); + ks.init(); + reasoner.init(); + + Individual[] pos1 = new Individual[] {ind[1], ind[2]}; + Individual[] neg1 = new Individual[] {ind[3], ind[4]}; + HeuristicTests.configurePosNegStandardLP(problem, pos1, neg1, "fmeasure", false); + + // TODO: continue + } + + + @Test public void approximationTests() { // perform F-Measure example in ontology engineering paper, which was computed on paper double[] approx1 = Heuristics.getFScoreApproximation(800, 0.8, 1, 10000, 41, 31); @@ -226,4 +286,19 @@ problem.init(); } + @SuppressWarnings("unchecked") + private static void configurePosNegStandardLP(PosNegLPStandard problem, Individual[] positiveExamples, Individual[] negativeExamples, String accuracyMethod, boolean useApproximations) throws ComponentInitException { + Set<Individual> s1 = new TreeSet<Individual>(Arrays.asList(positiveExamples)); + Set<Individual> 
s2 = new TreeSet<Individual>(Arrays.asList(negativeExamples)); + HeuristicTests.configurePosNegStandardLP(problem, s1, s2, accuracyMethod, useApproximations); + } + + // convencience method to set the learning problem to a desired configuration (approximations disabled) + private static void configurePosNegStandardLP(PosNegLPStandard problem, Set<Individual> positiveExamples, Set<Individual> negativeExamples, String accuracyMethod, boolean useApproximations) throws ComponentInitException { + problem.getConfigurator().setPositiveExamples(Helper.getStringSet(positiveExamples)); + problem.getConfigurator().setNegativeExamples(Helper.getStringSet(negativeExamples)); + problem.getConfigurator().setAccuracyMethod(accuracyMethod); + problem.getConfigurator().setUseApproximations(useApproximations); + problem.init(); + } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
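Rev 2635 routes the exact F-measure computation in PosNegLPStandard through Heuristics.getFScore and starts a JUnit test for the pos-neg measures. Below is a minimal, standalone sketch of the recall/precision/"too weak" flow visible in the diff; apart from the getFScore formula itself, the class and helper names are hypothetical and not part of DL-Learner.

// Minimal sketch of the exact F-measure check that rev 2635 routes through
// Heuristics.getFScore; names other than getFScore are illustrative only.
public class FMeasureSketch {

    // F1 score as the harmonic mean of recall and precision,
    // mirroring Heuristics.getFScore(recall, precision)
    static double getFScore(double recall, double precision) {
        return (recall + precision == 0) ? 0 : 2 * precision * recall / (precision + recall);
    }

    /**
     * Returns the F-measure of a candidate description, or -1 ("too weak")
     * when recall drops below 1 - noise, as in getFMeasureOrTooWeakExact.
     *
     * @param coveredPositives positives covered by the description
     * @param totalPositives   number of positive examples
     * @param coveredNegatives negatives covered ("additional instances")
     * @param noise            tolerated fraction of uncovered positives
     */
    static double fMeasureOrTooWeak(int coveredPositives, int totalPositives,
                                    int coveredNegatives, double noise) {
        double recall = coveredPositives / (double) totalPositives;
        if (recall < 1 - noise) {
            return -1; // abort early: too many uncovered positives
        }
        double precision = (coveredPositives + coveredNegatives == 0)
                ? 0 : coveredPositives / (double) (coveredPositives + coveredNegatives);
        return getFScore(recall, precision);
    }

    public static void main(String[] args) {
        // 2 positives, 1 covered; 1 negative covered; noise 0.5
        System.out.println(fMeasureOrTooWeak(1, 2, 1, 0.5)); // 0.5
    }
}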
From: <ji...@us...> - 2011-04-13 14:50:43
Revision: 2768 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2768&view=rev Author: jialva Date: 2011-04-13 14:50:32 +0000 (Wed, 13 Apr 2011) Log Message: ----------- fuzzy F-measure added Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-04-13 09:19:58 UTC (rev 2767) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-04-13 14:50:32 UTC (rev 2768) @@ -507,7 +507,7 @@ */ public double getPredAccuracyOrTooWeakExact(Description description, double noise) { - // double crispAccuracy = crispAccuracy(description, noise); + double crispAccuracy = crispAccuracy(description, noise); // if I erase next line, fuzzy reasoning fails // if (crispAccuracy == -1) return -1; @@ -538,7 +538,7 @@ // double crispAccuracy = crispAccuracy(description, noise); // } - double crispAccuracy = fuzzyAccuracy; + crispAccuracy = fuzzyAccuracy; if (crispAccuracy != fuzzyAccuracy) { System.err.println("***********************************************"); @@ -574,6 +574,27 @@ } public double getFMeasureOrTooWeakExact(Description description, double noise) { + + // added by Josue + // fuzzy F-measure + double coveredMembershipDegree = 0; + double totalMembershipDegree = 0; + double invertedCoveredMembershipDegree = 0; + + for (FuzzyIndividual ind: fuzzyExamples) { + coveredMembershipDegree += reasoner.hasTypeFuzzyMembership(description, ind) * ind.getBeliefDegree(); + totalMembershipDegree += ind.getBeliefDegree(); + invertedCoveredMembershipDegree += (1 - ind.getBeliefDegree()) * (1 - reasoner.hasTypeFuzzyMembership(description, ind)); + } + double fuzzyRecall = totalMembershipDegree == 0 ? 0 :coveredMembershipDegree/totalMembershipDegree; + // TODO this is like this??? not sure + if(fuzzyRecall < 1 - noise) { + return -1; + } + double fuzzyPrecision = (coveredMembershipDegree + invertedCoveredMembershipDegree) == 0 ? 0: coveredMembershipDegree / (coveredMembershipDegree + invertedCoveredMembershipDegree); + double fuzzyFmeasure = Heuristics.getFScore(fuzzyRecall, fuzzyPrecision); + + // crisp F-measure int additionalInstances = 0; for(Individual ind : negativeExamples) { if(reasoner.hasType(description, ind)) { @@ -597,7 +618,20 @@ double precision = (additionalInstances + coveredInstances == 0) ? 
0 : coveredInstances / (double) (coveredInstances + additionalInstances); // return getFMeasure(recall, precision); - return Heuristics.getFScore(recall, precision); + double crispFmeasure = Heuristics.getFScore(recall, precision); + + crispFmeasure = fuzzyFmeasure; + + if (crispFmeasure != fuzzyFmeasure) { + System.err.println("************************"); + System.err.println("* crispFmeasuer = " + crispFmeasure); + System.err.println("* fuzzyFmeasuer = " + fuzzyFmeasure); + System.err.println("************************"); + Scanner sc = new Scanner(System.in); + sc.nextLine(); + } + + return crispFmeasure; } // instead of using the standard operation, we use optimisation Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-04-13 09:19:58 UTC (rev 2767) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-04-13 14:50:32 UTC (rev 2768) @@ -16,13 +16,8 @@ public class FuzzyDLReasonerManager { - private static final String AUXFUZZYKBFILENAME = "/Users/josue/Documents/PhD/AKSW/DL_Learner/workspace/dllearner-parent/components-core/src/main/resources/FuzzyDL/examples/output/auxFuzzyKBfileName.fuzzyDL.txt"; - private static final String AUXFUZZYKBFILENAME_MANUALLYMODIFIED = "/Users/josue/Documents/PhD/AKSW/DL_Learner/workspace/dllearner-parent/components-core/src/main/resources/FuzzyDL/examples/output/auxFuzzyKBfileName_manuallyModified.fuzzyDL.txt"; - private static final String AUXNOFUZZYKBFILENAME_MANUALLYMODIFIED = "/Users/josue/Documents/PhD/AKSW/DL_Learner/workspace/dllearner-parent/components-core/src/main/resources/FuzzyDL/examples/output/auxNoFuzzyKBfileName_manuallyModified.fuzzyDL.txt"; - private static final String AUXNOFUZZYKBFILENAME_MANUALLYMODIFIED_NOPREFIX = "/Users/josue/Documents/PhD/AKSW/DL_Learner/workspace/dllearner-parent/components-core/src/main/resources/FuzzyDL/examples/output/auxNoFuzzyKBfileName_manuallyModified_noPrefix.fuzzyDL.txt"; - private static final String CHANGING_JUST_HIERARCHI_PROBLEM = "/Users/josue/Documents/PhD/AKSW/DL_Learner/workspace/dllearner-parent/components-core/src/main/resources/FuzzyDL/examples/output/fuzzyOWL2fuzzyDLparserOutput_manual.fuzzyDL.txt"; + private static final String CHANGING_JUST_HIERARCHI_PROBLEM = "/Users/josue/Documents/PhD/AKSW/fuzzySemanticTools/FuzzyDLMacOSX/FuzzyDL/examples/output/fuzzyOWL2fuzzyDLparserOutput_manual.fuzzyDL.txt"; private static final String FUZZYOWL2FUZZYDLPARSEROUTPUT = "/Users/josue/Documents/PhD/AKSW/fuzzySemanticTools/FuzzyDLMacOSX/FuzzyDL/examples/output/fuzzyOWL2fuzzyDLparserOutput.fuzzyDL.txt"; - private static String AUX = "/Users/josue/Documents/PhD/AKSW/DL_Learner/workspace/dllearner-parent/components-core/src/main/resources/FuzzyDL/examples/output/kk.fuzzyDL.txt"; private static String CONFIG_FILENAME = "/Users/josue/Documents/PhD/AKSW/fuzzySemanticTools/FuzzyDLMacOSX/FuzzyDL/CONFIG"; private Solution queryResult; Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java 2011-04-13 09:19:58 UTC (rev 2767) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java 2011-04-13 14:50:32 UTC (rev 2768) @@ -98,7 +98,7 @@ ComponentManager cm = 
ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/foodItems_v1.1.owl")); + ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyFoodItems_v1.owl")); ks.init(); //ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
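Rev 2768 adds a fuzzy F-measure to FuzzyPosNegLPStandard: coverage is accumulated as the reasoner's membership degree weighted by each example's belief degree, and the "additional instances" term becomes the product of inverted belief and inverted membership. The sketch below isolates that arithmetic; the FuzzyExample type and the driver in main are hypothetical stand-ins for DL-Learner's FuzzyIndividual and the reasoner call hasTypeFuzzyMembership.

// Standalone sketch of the fuzzy F-measure introduced in rev 2768.
import java.util.Arrays;
import java.util.List;

public class FuzzyFMeasureSketch {

    static class FuzzyExample {
        final double beliefDegree;      // annotated truth degree of the example
        final double membershipDegree;  // reasoner.hasTypeFuzzyMembership(description, ind)
        FuzzyExample(double belief, double membership) {
            this.beliefDegree = belief;
            this.membershipDegree = membership;
        }
    }

    static double fuzzyFMeasureOrTooWeak(Iterable<FuzzyExample> examples, double noise) {
        double covered = 0, total = 0, invertedCovered = 0;
        for (FuzzyExample ex : examples) {
            covered += ex.membershipDegree * ex.beliefDegree;
            total += ex.beliefDegree;
            invertedCovered += (1 - ex.beliefDegree) * (1 - ex.membershipDegree);
        }
        double recall = total == 0 ? 0 : covered / total;
        if (recall < 1 - noise) {
            return -1; // too weak, as in the crisp case
        }
        double precision = (covered + invertedCovered) == 0
                ? 0 : covered / (covered + invertedCovered);
        // harmonic mean, as in Heuristics.getFScore
        return (recall + precision == 0) ? 0 : 2 * precision * recall / (precision + recall);
    }

    public static void main(String[] args) {
        List<FuzzyExample> examples = Arrays.asList(
                new FuzzyExample(1.0, 0.8),   // positive-like example, well covered
                new FuzzyExample(0.0, 0.3));  // negative-like example, partially covered
        System.out.println(fuzzyFMeasureOrTooWeak(examples, 0.3)); // ~0.64
    }
}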
From: <ji...@us...> - 2011-05-04 09:45:02
Revision: 2782 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2782&view=rev Author: jialva Date: 2011-05-04 09:44:56 +0000 (Wed, 04 May 2011) Log Message: ----------- New update of fuzzyDL-Learner Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-03 15:27:07 UTC (rev 2781) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-04 09:44:56 UTC (rev 2782) @@ -63,9 +63,9 @@ fuzzyFileParser = new FuzzyOwl2toFuzzyDL(ontologyFile, FUZZYOWL2FUZZYDLPARSEROUTPUT); fuzzyFileParser.translateOwl2Ontology(); - System.err.println("WARNING: you're using a particular fuzzy ontology"); - parser = new Parser(new FileInputStream(CHANGING_JUST_HIERARCHI_PROBLEM)); - // parser = new Parser(new FileInputStream(FUZZYOWL2FUZZYDLPARSEROUTPUT)); +// System.err.println("WARNING: you're using a particular fuzzy ontology"); +// parser = new Parser(new FileInputStream(CHANGING_JUST_HIERARCHI_PROBLEM)); + parser = new Parser(new FileInputStream(FUZZYOWL2FUZZYDLPARSEROUTPUT)); parser.Start(); return parser.getKB(); Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-03 15:27:07 UTC (rev 2781) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-04 09:44:56 UTC (rev 2782) @@ -78,7 +78,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/noFuzzyTrains_v1.2.owl")); + ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/fuzzyTrains_v1.2.owl")); ks.init(); //ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ji...@us...> - 2011-05-11 10:20:29
Revision: 2789 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2789&view=rev Author: jialva Date: 2011-05-11 10:20:23 +0000 (Wed, 11 May 2011) Log Message: ----------- New update of fuzzyDL-Learner Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-05-10 12:31:35 UTC (rev 2788) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-05-11 10:20:23 UTC (rev 2789) @@ -515,7 +515,7 @@ // if I erase next line, fuzzy reasoning fails if (crispAccuracy == -1) { // System.out.println("crisp return -1"); - return -1; + // return -1; } // BEGIN @@ -527,18 +527,38 @@ // System.out.println("noise = " + noise); - int individualCounter = fuzzyExamples.size(); + + // TODO + // TODO + // TODO + // TODO \xC1\xC1\xC1sacar de aqu\x92 y metenerlo en la clase fuzzyExample/fuzzyIndividual o .. + double totalTruth = 0; for (FuzzyIndividual fuzzyExample : fuzzyExamples) { + totalTruth += fuzzyExample.getBeliefDegree(); + } + // TODO + // TODO + // TODO + // TODO + + // int individualCounter = fuzzyExamples.size(); + double individualCounter = totalTruth; + for (FuzzyIndividual fuzzyExample : fuzzyExamples) { descriptionMembership += reasoner.hasTypeFuzzyMembership(description, fuzzyExample); - individualCounter--; - if ((descriptionMembership + individualCounter) / fuzzyExamples.size() < noise) + // individualCounter--; + if (individualCounter != 0) individualCounter--; + // before +// if ((descriptionMembership + individualCounter) / fuzzyExamples.size() < noise) +// return -1; + // after (to match the noise management of the crisp part) + if ((descriptionMembership + individualCounter) < ((1 - noise) * totalTruth)) return -1; } double fuzzyAccuracy = descriptionMembership / (double)fuzzyExamples.size(); - System.err.println("crispAccuracy = fuzzyAccuracy"); - crispAccuracy = fuzzyAccuracy; +// System.err.println("crispAccuracy = fuzzyAccuracy"); +// crispAccuracy = fuzzyAccuracy; if (crispAccuracy != fuzzyAccuracy) { System.err.println("***********************************************"); @@ -546,8 +566,8 @@ System.err.println("* (crispAccuracy[" + crispAccuracy + "] != fuzzyAccuracy[" + fuzzyAccuracy + "])"); System.err.println("* DESC: " + description); System.err.println("***********************************************"); -// Scanner sc = new Scanner(System.in); -// sc.nextLine(); + Scanner sc = new Scanner(System.in); + sc.nextLine(); } return fuzzyAccuracy; Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-10 12:31:35 UTC (rev 2788) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-11 10:20:23 UTC (rev 2789) @@ -21,7 +21,7 @@ public class FuzzyDLReasonerManager { - private static final String 
CHANGING_JUST_HIERARCHI_PROBLEM = "/Users/josue/Documents/PhD/AKSW/fuzzySemanticTools/FuzzyDLMacOSX/FuzzyDL/examples/output/fuzzyOWL2fuzzyDLparserOutput_manual.fuzzyDL.txt"; + private static final String CHANGING_JUST_HIERARCHI_PROBLEM = "fuzzyOWL2fuzzyDLparserOutput_manual.fuzzyDL.txt"; private static final String FUZZYOWL2FUZZYDLPARSEROUTPUT = "fuzzyOWL2fuzzyDLparserOutput.fuzzyDL.txt"; private static String CONFIG_FILENAME = "/Users/josue/Documents/PhD/AKSW/fuzzySemanticTools/FuzzyDLMacOSX/FuzzyDL/CONFIG"; @@ -66,9 +66,9 @@ fuzzyFileParser = new FuzzyOwl2toFuzzyDL(ontologyFile, FUZZYOWL2FUZZYDLPARSEROUTPUT); fuzzyFileParser.translateOwl2Ontology(); -// System.err.println("WARNING: you're using a particular fuzzy ontology"); -// parser = new Parser(new FileInputStream(CHANGING_JUST_HIERARCHI_PROBLEM)); - parser = new Parser(new FileInputStream(FUZZYOWL2FUZZYDLPARSEROUTPUT)); + System.err.println("WARNING: you're using a particular fuzzy ontology"); + parser = new Parser(new FileInputStream(CHANGING_JUST_HIERARCHI_PROBLEM)); +// parser = new Parser(new FileInputStream(FUZZYOWL2FUZZYDLPARSEROUTPUT)); parser.Start(); return parser.getKB(); Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-10 12:31:35 UTC (rev 2788) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-11 10:20:23 UTC (rev 2789) @@ -80,7 +80,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/fuzzyTrains_v1.3.owl")); + ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/noFuzzyTrains_v1.3.owl")); ks.init(); //ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
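The key change in rev 2789 is the noise handling in getPredAccuracyOrTooWeakExact: the early-abort test is now expressed against (1 - noise) * totalTruth, where totalTruth is the sum of the examples' belief degrees, so the fuzzy path matches the noise management of the crisp part. The sketch below illustrates that threshold logic under the assumption that the remaining optimistic mass is tracked via belief degrees (the code committed in this revision still decrements a unit counter, which a later revision replaces); all names not present in the diff are illustrative.

// Sketch of the revised early-abort noise check from rev 2789.
public class FuzzyAccuracySketch {

    /**
     * @param membership per-example fuzzy membership degrees of the description
     * @param belief     per-example belief (truth) degrees of the examples
     * @param noise      tolerated noise in [0,1]
     * @return accuracy, or -1 if the threshold can no longer be reached
     */
    static double accuracyOrTooWeak(double[] membership, double[] belief, double noise) {
        double totalTruth = 0;
        for (double b : belief) totalTruth += b;

        double descriptionMembership = 0;
        double individualCounter = totalTruth; // optimistic mass still to come
        for (int i = 0; i < membership.length; i++) {
            descriptionMembership += membership[i];
            individualCounter -= belief[i];
            // even if all remaining examples were perfectly covered,
            // the bound (1 - noise) * totalTruth could not be reached -> too weak
            if (descriptionMembership + individualCounter < (1 - noise) * totalTruth) {
                return -1;
            }
        }
        return descriptionMembership / membership.length;
    }

    public static void main(String[] args) {
        double[] membership = {0.9, 0.2};
        double[] belief = {1.0, 0.5};
        System.out.println(accuracyOrTooWeak(membership, belief, 0.4)); // 0.55
    }
}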
From: <ji...@us...> - 2011-05-11 12:44:17
Revision: 2790 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2790&view=rev Author: jialva Date: 2011-05-11 12:44:11 +0000 (Wed, 11 May 2011) Log Message: ----------- New update of fuzzyDL-Learner Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLP.java trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLP.java 2011-05-11 10:20:23 UTC (rev 2789) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLP.java 2011-05-11 12:44:11 UTC (rev 2790) @@ -57,6 +57,7 @@ protected boolean useRetrievalForClassification = false; protected UseMultiInstanceChecks useMultiInstanceChecks = UseMultiInstanceChecks.TWOCHECKS; protected double percentPerLengthUnit = 0.05; + protected double totalTruth = 0; /** * If instance checks are used for testing concepts (e.g. no retrieval), then @@ -117,8 +118,12 @@ public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { String name = entry.getOptionName(); // added by Josue - if (name.equals("fuzzyExamples")) + if (name.equals("fuzzyExamples")){ fuzzyExamples = CommonConfigMappings.getFuzzyIndividualSet((Set<FuzzyExample>) entry.getValue()); + for (FuzzyIndividual fuzzyExample : fuzzyExamples) { + totalTruth += fuzzyExample.getBeliefDegree(); + } + } // TODO delete positiveExamples & negativeExamples else if (name.equals("positiveExamples")) positiveExamples = CommonConfigMappings Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-05-11 10:20:23 UTC (rev 2789) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-05-11 12:44:11 UTC (rev 2790) @@ -509,7 +509,7 @@ */ public double getPredAccuracyOrTooWeakExact(Description description, double noise) { - System.out.println(errorIndex++); + // System.out.println(errorIndex++); double crispAccuracy = crispAccuracy(description, noise); // if I erase next line, fuzzy reasoning fails @@ -532,10 +532,10 @@ // TODO // TODO // TODO \xC1\xC1\xC1sacar de aqu\x92 y metenerlo en la clase fuzzyExample/fuzzyIndividual o .. 
- double totalTruth = 0; - for (FuzzyIndividual fuzzyExample : fuzzyExamples) { - totalTruth += fuzzyExample.getBeliefDegree(); - } +// double totalTruth = 0; +// for (FuzzyIndividual fuzzyExample : fuzzyExamples) { +// totalTruth += fuzzyExample.getBeliefDegree(); +// } // TODO // TODO // TODO Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-11 10:20:23 UTC (rev 2789) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-11 12:44:11 UTC (rev 2790) @@ -66,9 +66,9 @@ fuzzyFileParser = new FuzzyOwl2toFuzzyDL(ontologyFile, FUZZYOWL2FUZZYDLPARSEROUTPUT); fuzzyFileParser.translateOwl2Ontology(); - System.err.println("WARNING: you're using a particular fuzzy ontology"); - parser = new Parser(new FileInputStream(CHANGING_JUST_HIERARCHI_PROBLEM)); -// parser = new Parser(new FileInputStream(FUZZYOWL2FUZZYDLPARSEROUTPUT)); +// System.err.println("WARNING: you're using a particular fuzzy ontology"); +// parser = new Parser(new FileInputStream(CHANGING_JUST_HIERARCHI_PROBLEM)); + parser = new Parser(new FileInputStream(FUZZYOWL2FUZZYDLPARSEROUTPUT)); parser.Start(); return parser.getKB(); Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-11 10:20:23 UTC (rev 2789) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-11 12:44:11 UTC (rev 2790) @@ -51,9 +51,9 @@ "http://www.example.com/fuzzyTrains.owl#west6", "http://www.example.com/fuzzyTrains.owl#west7" }; - + public Description learn() throws LearningProblemUnsupportedException, IOException, ComponentInitException { - + // // positive and negative examples // @@ -80,7 +80,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/noFuzzyTrains_v1.3.owl")); + ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/fuzzyTrains_v1.3.owl")); ks.init(); //ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); @@ -113,8 +113,13 @@ } public static void main(String args[]) throws LearningProblemUnsupportedException, IOException, ComponentInitException { + long start = System.currentTimeMillis(); + FuzzyDLLTest_Trains test = new FuzzyDLLTest_Trains(); test.learn(); + + System.err.println("running time (s) = " + (System.currentTimeMillis() - start)/1000); + } } \ No newline at end of file Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java 2011-05-11 10:20:23 UTC (rev 2789) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java 2011-05-11 12:44:11 UTC (rev 2790) @@ -118,8 +118,12 @@ } public static void main(String args[]) throws LearningProblemUnsupportedException, IOException, ComponentInitException { + long start = System.currentTimeMillis(); + FuzzyDLLTest_noFuzzyTrains test = new 
FuzzyDLLTest_noFuzzyTrains(); test.learn(); + + System.err.println("running time (s) = " + (System.currentTimeMillis() - start)/1000); } } \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
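Rev 2790 moves the computation of totalTruth (the sum of belief degrees over all fuzzy examples) out of the per-candidate accuracy method and into applyConfigEntry in FuzzyPosNegLP, so the sum is computed once when the examples are set rather than on every evaluation. A minimal sketch of that pattern, with hypothetical field and method names:

// Sketch of the rev 2790 refactoring: cache the belief-degree sum at
// configuration time instead of recomputing it in the evaluation hot path.
import java.util.LinkedHashMap;
import java.util.Map;

public class FuzzyExampleConfigSketch {

    private final Map<String, Double> fuzzyExamples = new LinkedHashMap<String, Double>(); // IRI -> belief degree
    private double totalTruth = 0;

    // called once at configuration time (cf. applyConfigEntry for "fuzzyExamples")
    void setFuzzyExamples(Map<String, Double> examples) {
        fuzzyExamples.clear();
        fuzzyExamples.putAll(examples);
        totalTruth = 0;
        for (double belief : examples.values()) {
            totalTruth += belief;
        }
    }

    // the evaluation hot path can now reuse the cached sum
    double noiseThreshold(double noise) {
        return (1 - noise) * totalTruth;
    }

    public static void main(String[] args) {
        FuzzyExampleConfigSketch lp = new FuzzyExampleConfigSketch();
        Map<String, Double> examples = new LinkedHashMap<String, Double>();
        examples.put("http://example.org/i1", 1.0);
        examples.put("http://example.org/i2", 0.5);
        lp.setFuzzyExamples(examples);
        System.out.println(lp.noiseThreshold(0.4)); // 0.9
    }
}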
From: <ji...@us...> - 2011-05-12 11:45:25
Revision: 2796 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2796&view=rev Author: jialva Date: 2011-05-12 11:45:19 +0000 (Thu, 12 May 2011) Log Message: ----------- New update of fuzzyDL-Learner Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-12 10:23:17 UTC (rev 2795) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyDLReasonerManager.java 2011-05-12 11:45:19 UTC (rev 2796) @@ -21,9 +21,9 @@ public class FuzzyDLReasonerManager { - private static final String CHANGING_JUST_HIERARCHI_PROBLEM = "fuzzyOWL2fuzzyDLparserOutput_manual.fuzzyDL.txt"; - private static final String FUZZYOWL2FUZZYDLPARSEROUTPUT = "fuzzyOWL2fuzzyDLparserOutput.fuzzyDL.txt"; - private static String CONFIG_FILENAME = "/Users/josue/Documents/PhD/AKSW/fuzzySemanticTools/FuzzyDLMacOSX/FuzzyDL/CONFIG"; + // private static final String CHANGING_JUST_HIERARCHI_PROBLEM = "../examples/fuzzydll/fuzzyOWL2fuzzyDLparserOutput_manual.fuzzyDL.txt"; + private static final String FUZZYOWL2FUZZYDLPARSEROUTPUT = "../examples/fuzzydll/fuzzyOWL2fuzzyDLparserOutput.fuzzyDL.txt"; + private static String CONFIG_FILENAME = "../examples/fuzzydll/CONFIG"; private Solution queryResult; private KnowledgeBase fuzzyKB; @@ -32,7 +32,7 @@ private FuzzyOwl2toFuzzyDL fuzzyFileParser; private int auxCounter = 0; - private FileOutputStream errorFile; + // private FileOutputStream errorFile; public FuzzyDLReasonerManager(String ontologyFile) throws Exception { queryResult = null; @@ -48,7 +48,7 @@ solveKB(); - errorFile = new FileOutputStream("errorFile.txt"); + // errorFile = new FileOutputStream("errorFile.txt"); } private void solveKB() { @@ -94,17 +94,17 @@ // System.exit(0); } } catch (Exception e) { - // e.printStackTrace(); - try { - errorFile.write(fIndividual.toString().getBytes()); - errorFile.write("\n".getBytes()); - errorFile.write(fConcept.toString().getBytes()); - errorFile.write("\n".getBytes()); - errorFile.write(getStackTrace(e).getBytes()); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } + e.printStackTrace(); +// try { +// errorFile.write(fIndividual.toString().getBytes()); +// errorFile.write("\n".getBytes()); +// errorFile.write(fConcept.toString().getBytes()); +// errorFile.write("\n".getBytes()); +// errorFile.write(getStackTrace(e).getBytes()); +// } catch (IOException e1) { +// // TODO Auto-generated catch block +// e1.printStackTrace(); +// } // Scanner sc = new Scanner(System.in); // sc.nextLine(); } Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-12 10:23:17 UTC (rev 2795) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-05-12 11:45:19 UTC (rev 2796) @@ -80,7 +80,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new 
URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/fuzzyTrains_v1.3.owl")); + ks.getConfigurator().setUrl(new URL("file", null, "../examples/fuzzydll/fuzzyTrains_v1.3.owl")); ks.init(); //ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ji...@us...> - 2011-06-08 18:10:20
Revision: 2857 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2857&view=rev Author: jialva Date: 2011-06-08 18:10:11 +0000 (Wed, 08 Jun 2011) Log Message: ----------- New update of fuzzyDL-Learner Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java trunk/components-core/src/main/java/org/dllearner/core/ReasonerComponent.java trunk/components-core/src/main/java/org/dllearner/core/fuzzydll/FuzzyIndividualReasoner.java trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/refinementoperators/fuzzydll/ trunk/components-core/src/main/java/org/dllearner/refinementoperators/fuzzydll/FuzzyRhoDRDown.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains_noFuzzyIndividuals.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-06-08 12:11:40 UTC (rev 2856) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-06-08 18:10:11 UTC (rev 2857) @@ -19,7 +19,11 @@ */ package org.dllearner.algorithms.fuzzydll; +import java.io.BufferedWriter; import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; import java.text.DecimalFormat; import java.util.Collection; import java.util.Iterator; @@ -59,6 +63,7 @@ import org.dllearner.refinementoperators.OperatorInverter; import org.dllearner.refinementoperators.RefinementOperator; import org.dllearner.refinementoperators.RhoDRDown; +import org.dllearner.refinementoperators.fuzzydll.FuzzyRhoDRDown; import org.dllearner.utilities.Files; import org.dllearner.utilities.Helper; import org.dllearner.utilities.owl.ConceptComparator; @@ -144,6 +149,10 @@ private int minHorizExp = 0; private int maxHorizExp = 0; + // TODO remove this variable, just for testing purposes + private int counter = 0; + private PrintWriter out; + @Override public FuzzyCELOEConfigurator getConfigurator() { return configurator; @@ -196,6 +205,17 @@ @Override public void init() throws ComponentInitException { + + // TODO remove, just for testing purposes + FileWriter fstream; + try { + fstream = new FileWriter("../examples/fuzzydll/testOut_TriRecEq.log"); + out = new PrintWriter(fstream); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + // copy class hierarchy and modify it such that each class is only // reachable via a single path ClassHierarchy classHierarchy = reasoner.getClassHierarchy().clone(); @@ -210,7 +230,7 @@ singleSuggestionMode = configurator.getSingleSuggestionMode(); // create refinement operator - operator = new RhoDRDown(reasoner, classHierarchy, startClass, configurator); + operator = new FuzzyRhoDRDown(reasoner, classHierarchy, startClass, configurator); baseURI = reasoner.getBaseURI(); prefixes = reasoner.getPrefixes(); if(configurator.getWriteSearchTree()) { @@ -315,7 +335,6 @@ } else if (learningProblem instanceof FuzzyPosNegLP) { examples = 
Helper.union(((FuzzyPosNegLP)learningProblem).getPositiveExamples(),((FuzzyPosNegLP)learningProblem).getNegativeExamples()); } - } @Override @@ -375,12 +394,15 @@ Monitor mon = MonitorFactory.start("refineNode"); TreeSet<Description> refinements = refineNode(nextNode); mon.stop(); - -// System.out.println("next node: " + nextNode); -// for(Description refinement : refinements) { -// System.out.println("refinement: " + refinement); -// } + // TODO just for testing purposes + counter++; + System.out.println(counter + " next node: " + nextNode); + for(Description refinement : refinements) { + System.out.println("refinement: " + refinement); + } + System.out.println(); + while(refinements.size() != 0) { // pick element from set Description refinement = refinements.pollFirst(); @@ -483,20 +505,18 @@ // add node to search tree if it is not too weak // returns true if node was added and false otherwise private boolean addNode(Description description, FuzzyOENode parentNode) { - -// System.out.println(description); - + // counter++; + // System.out.println(counter + " " + description); + // redundancy check (return if redundant) boolean nonRedundant = descriptions.add(description); if(!nonRedundant) { return false; } - // check whether the description is allowed if(!isDescriptionAllowed(description, parentNode)) { return false; - } - + } // System.out.println("Test " + new Date()); // quality of description (return if too weak) double accuracy = learningProblem.getAccuracyOrTooWeak(description, noise); @@ -544,6 +564,18 @@ // necessary since rewriting is expensive boolean isCandidate = !bestEvaluatedDescriptions.isFull(); if(!isCandidate) { + + // TODO remove, just testing purposes +// Iterator i = bestEvaluatedDescriptions.getSet().iterator(); +// int j = 0; +// out.println(counter + " " + description); +// while (i.hasNext()) { +// j++; +// // System.err.println(j + " -> " + i.next()); +// out.println(j + " -> " + i.next()); +// } +// out.println(); + EvaluatedDescription worst = bestEvaluatedDescriptions.getWorst(); double accThreshold = worst.getAccuracy(); isCandidate = Modified: trunk/components-core/src/main/java/org/dllearner/core/ReasonerComponent.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/ReasonerComponent.java 2011-06-08 12:11:40 UTC (rev 2856) +++ trunk/components-core/src/main/java/org/dllearner/core/ReasonerComponent.java 2011-06-08 18:10:11 UTC (rev 2857) @@ -358,7 +358,32 @@ throws ReasoningMethodUnsupportedException { throw new ReasoningMethodUnsupportedException(); } + + @Override + public final SortedSet<FuzzyIndividual> getFuzzyIndividuals(Description concept) { + reasoningStartTimeTmp = System.nanoTime(); + SortedSet<FuzzyIndividual> result; + try { + result = getFuzzyIndividualsImpl(concept); + } catch (ReasoningMethodUnsupportedException e) { + handleExceptions(e); + return null; + } + nrOfRetrievals++; + reasoningDurationTmp = System.nanoTime() - reasoningStartTimeTmp; + retrievalReasoningTimeNs += reasoningDurationTmp; + overallReasoningTimeNs += reasoningDurationTmp; + if(logger.isTraceEnabled()) { + logger.trace("reasoner query getIndividuals: " + concept + " " + result); + } + return result; + } + protected SortedSet<FuzzyIndividual> getFuzzyIndividualsImpl(Description concept) + throws ReasoningMethodUnsupportedException { + throw new ReasoningMethodUnsupportedException(); + } + @Override public final boolean hasType(Description concept, Individual s) { reasoningStartTimeTmp = 
System.nanoTime(); Modified: trunk/components-core/src/main/java/org/dllearner/core/fuzzydll/FuzzyIndividualReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/fuzzydll/FuzzyIndividualReasoner.java 2011-06-08 12:11:40 UTC (rev 2856) +++ trunk/components-core/src/main/java/org/dllearner/core/fuzzydll/FuzzyIndividualReasoner.java 2011-06-08 18:10:11 UTC (rev 2857) @@ -1,5 +1,7 @@ package org.dllearner.core.fuzzydll; +import java.util.SortedSet; + import org.dllearner.core.owl.Description; import org.dllearner.core.owl.fuzzydll.FuzzyIndividual; @@ -20,4 +22,5 @@ * @return fuzzy membership degree of <code>individual</code> satisfying <code>description</code> [0-1]. */ public double hasTypeFuzzyMembership(Description description, FuzzyIndividual individual); + public SortedSet<FuzzyIndividual> getFuzzyIndividuals(Description concept); } Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-06-08 12:11:40 UTC (rev 2856) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-06-08 18:10:11 UTC (rev 2857) @@ -511,7 +511,7 @@ // System.out.println(errorIndex++); - double crispAccuracy = crispAccuracy(description, noise); + // double crispAccuracy = crispAccuracy(description, noise); // if I erase next line, fuzzy reasoning fails // if (crispAccuracy == -1) { // System.out.print(description); @@ -525,20 +525,17 @@ // double posMembership = 0; // double negMembership = 0; double descriptionMembership = 0; - double singleMembership = 0; // double accumulatedSingleMembership = 0; double nonAccumulativeDescriptionMembership = 0; double accumulativeDescriptionMembership = 0; // System.out.println("noise = " + noise); - // TODO in order the noise check to work ... is it necessary to have the examples ordered by its truthBelief? 
// int individualCounter = fuzzyExamples.size(); double individualCounter = totalTruth; for (FuzzyIndividual fuzzyExample : fuzzyExamples) { - singleMembership = reasoner.hasTypeFuzzyMembership(description, fuzzyExample); // accumulatedSingleMembership += singleMembership; - nonAccumulativeDescriptionMembership = 1 - Math.abs(fuzzyExample.getTruthDegree() - singleMembership); + nonAccumulativeDescriptionMembership = 1 - Math.abs(fuzzyExample.getTruthDegree() - reasoner.hasTypeFuzzyMembership(description, fuzzyExample)); descriptionMembership += nonAccumulativeDescriptionMembership; individualCounter -= fuzzyExample.getTruthDegree(); if ((accumulativeDescriptionMembership + (nonAccumulativeDescriptionMembership * fuzzyExample.getTruthDegree()) + individualCounter) < ((1 - noise) * totalTruth)) @@ -552,15 +549,15 @@ // System.err.println("crispAccuracy = fuzzyAccuracy"); // crispAccuracy = fuzzyAccuracy; - if (crispAccuracy != fuzzyAccuracy) { - System.err.println("***********************************************"); - //System.err.println("* " + (errorIndex++)); - System.err.println("* (crispAccuracy[" + crispAccuracy + "] != fuzzyAccuracy[" + fuzzyAccuracy + "])"); - System.err.println("* DESC: " + description); - System.err.println("***********************************************"); - Scanner sc = new Scanner(System.in); - sc.nextLine(); - } +// if (crispAccuracy != fuzzyAccuracy) { +// System.err.println("***********************************************"); +// //System.err.println("* " + (errorIndex++)); +// System.err.println("* (crispAccuracy[" + crispAccuracy + "] != fuzzyAccuracy[" + fuzzyAccuracy + "])"); +// System.err.println("* DESC: " + description); +// System.err.println("***********************************************"); +// Scanner sc = new Scanner(System.in); +// sc.nextLine(); +// } return fuzzyAccuracy; } @@ -587,7 +584,35 @@ } return (positiveExamples.size() - notCoveredPos + notCoveredNeg) / (double) allExamples.size(); } - + + // added by Josue + private double crispfMeasure(Description description, double noise) { + // crisp F-measure + int additionalInstances = 0; + for(Individual ind : negativeExamples) { + if(reasoner.hasType(description, ind)) { + additionalInstances++; + } + } + + int coveredInstances = 0; + for(Individual ind : positiveExamples) { + if(reasoner.hasType(description, ind)) { + coveredInstances++; + } + } + + double recall = coveredInstances/(double)positiveExamples.size(); + + if(recall < 1 - noise) { + return -1; + } + + double precision = (additionalInstances + coveredInstances == 0) ? 0 : coveredInstances / (double) (coveredInstances + additionalInstances); + + return Heuristics.getFScore(recall, precision); + } + public double getFMeasureOrTooWeakExact(Description description, double noise) { // added by Josue @@ -611,43 +636,19 @@ } double fuzzyPrecision = (coveredMembershipDegree + invertedCoveredMembershipDegree) == 0 ? 
0: coveredMembershipDegree / (coveredMembershipDegree + invertedCoveredMembershipDegree); double fuzzyFmeasure = Heuristics.getFScore(fuzzyRecall, fuzzyPrecision); + + // double crispFmeasure = crispfMeasure(description, noise); - // crisp F-measure - int additionalInstances = 0; - for(Individual ind : negativeExamples) { - if(reasoner.hasType(description, ind)) { - additionalInstances++; - } - } - - int coveredInstances = 0; - for(Individual ind : positiveExamples) { - if(reasoner.hasType(description, ind)) { - coveredInstances++; - } - } - - double recall = coveredInstances/(double)positiveExamples.size(); - - if(recall < 1 - noise) { - return -1; - } - - double precision = (additionalInstances + coveredInstances == 0) ? 0 : coveredInstances / (double) (coveredInstances + additionalInstances); - -// return getFMeasure(recall, precision); - double crispFmeasure = Heuristics.getFScore(recall, precision); - // crispFmeasure = fuzzyFmeasure; - if (crispFmeasure != fuzzyFmeasure) { - System.err.println("************************"); - System.err.println("* crispFmeasuer = " + crispFmeasure); - System.err.println("* fuzzyFmeasuer = " + fuzzyFmeasure); - System.err.println("************************"); - Scanner sc = new Scanner(System.in); - sc.nextLine(); - } +// if (crispFmeasure != fuzzyFmeasure) { +// System.err.println("************************"); +// System.err.println("* crispFmeasuer = " + crispFmeasure); +// System.err.println("* fuzzyFmeasuer = " + fuzzyFmeasure); +// System.err.println("************************"); +// Scanner sc = new Scanner(System.in); +// sc.nextLine(); +// } return fuzzyFmeasure; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2011-06-08 12:11:40 UTC (rev 2856) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2011-06-08 18:10:11 UTC (rev 2857) @@ -643,6 +643,19 @@ } @Override + public SortedSet<FuzzyIndividual> getFuzzyIndividualsImpl(Description concept) { +// OWLDescription d = getOWLAPIDescription(concept); + OWLClassExpression d = OWLAPIDescriptionConvertVisitor.getOWLClassExpression(concept); + Set<OWLNamedIndividual> individuals = reasoner.getInstances(d, false).getFlattened(); + SortedSet<FuzzyIndividual> inds = new TreeSet<FuzzyIndividual>(); + for(OWLNamedIndividual ind : individuals) + //ugly code + if(ind != null) + inds.add(new FuzzyIndividual(ind.toStringID(), this.hasTypeFuzzyMembershipImpl(concept, new FuzzyIndividual(ind.toStringID(),1)))); + return inds; + } + + @Override public Set<NamedClass> getTypesImpl(Individual individual) { Set<Node<OWLClass>> result = null; Added: trunk/components-core/src/main/java/org/dllearner/refinementoperators/fuzzydll/FuzzyRhoDRDown.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/fuzzydll/FuzzyRhoDRDown.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/fuzzydll/FuzzyRhoDRDown.java 2011-06-08 18:10:11 UTC (rev 2857) @@ -0,0 +1,1531 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. 
+ * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.refinementoperators.fuzzydll; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.Map.Entry; + +import org.apache.log4j.Logger; +import org.dllearner.core.ReasonerComponent; +import org.dllearner.core.configurators.OCELConfigurator; +import org.dllearner.core.configurators.RefinementOperatorConfigurator; +import org.dllearner.core.options.CommonConfigOptions; +import org.dllearner.core.owl.BooleanValueRestriction; +import org.dllearner.core.owl.ClassHierarchy; +import org.dllearner.core.owl.Constant; +import org.dllearner.core.owl.DataRange; +import org.dllearner.core.owl.DatatypeProperty; +import org.dllearner.core.owl.DatatypeSomeRestriction; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.DoubleMaxValue; +import org.dllearner.core.owl.DoubleMinValue; +import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.Intersection; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.core.owl.Negation; +import org.dllearner.core.owl.Nothing; +import org.dllearner.core.owl.ObjectAllRestriction; +import org.dllearner.core.owl.ObjectCardinalityRestriction; +import org.dllearner.core.owl.ObjectMaxCardinalityRestriction; +import org.dllearner.core.owl.ObjectMinCardinalityRestriction; +import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.core.owl.ObjectPropertyExpression; +import org.dllearner.core.owl.ObjectQuantorRestriction; +import org.dllearner.core.owl.ObjectSomeRestriction; +import org.dllearner.core.owl.ObjectValueRestriction; +import org.dllearner.core.owl.StringValueRestriction; +import org.dllearner.core.owl.Thing; +import org.dllearner.core.owl.Union; +import org.dllearner.core.owl.fuzzydll.FuzzyIndividual; +import org.dllearner.refinementoperators.MathOperations; +import org.dllearner.refinementoperators.RefinementOperatorAdapter; +import org.dllearner.refinementoperators.Utility; +import org.dllearner.utilities.Helper; +import org.dllearner.utilities.owl.ConceptComparator; +import org.dllearner.utilities.owl.ConceptTransformation; + +/** + * A downward refinement operator, which makes use of domains + * and ranges of properties. The operator is currently under + * development. Its aim is to span a much "cleaner" and smaller search + * tree compared to RhoDown by omitting many class descriptions, + * which are obviously too weak, because they violate + * domain/range restrictions. Furthermore, it makes use of disjoint + * classes in the knowledge base. 
+ * + * TODO Some of the code has moved to {@link Utility} in a modified + * form to make it accessible for implementations of other refinement + * operators. These utility methods may be completed and carefully + * integrated back later. + * + * @author Jens Lehmann + * + */ +public class FuzzyRhoDRDown extends RefinementOperatorAdapter { + + private static Logger logger = Logger + .getLogger(FuzzyRhoDRDown.class); + + private ReasonerComponent rs; + + // hierarchies + private ClassHierarchy subHierarchy; + + // domains and ranges + private Map<ObjectProperty,Description> opDomains = new TreeMap<ObjectProperty,Description>(); + private Map<DatatypeProperty,Description> dpDomains = new TreeMap<DatatypeProperty,Description>(); + private Map<ObjectProperty,Description> opRanges = new TreeMap<ObjectProperty,Description>(); + + // maximum number of fillers for eeach role + private Map<ObjectProperty,Integer> maxNrOfFillers = new TreeMap<ObjectProperty,Integer>(); + // limit for cardinality restrictions (this makes sense if we e.g. have compounds with up to + // more than 200 atoms but we are only interested in atoms with certain characteristics and do + // not want something like e.g. >= 204 hasAtom.NOT Carbon-87; which blows up the search space + private int cardinalityLimit = 5; + + // start concept (can be used to start from an arbitrary concept, needs + // to be Thing or NamedClass), note that when you use e.g. Compound as + // start class, then the algorithm should start the search with class + // Compound (and not with Thing), because otherwise concepts like + // NOT Carbon-87 will be returned which itself is not a subclass of Compound + private Description startClass = new Thing(); + + // the length of concepts of top refinements, the first values is + // for refinements of \rho_\top(\top), the second one for \rho_A(\top) + private int topRefinementsLength = 0; + private Map<NamedClass, Integer> topARefinementsLength = new TreeMap<NamedClass, Integer>(); + // M is finite and this value is the maximum length of any value in M + private static int mMaxLength = 4; + + // the sets M_\top and M_A + private Map<Integer,SortedSet<Description>> m = new TreeMap<Integer,SortedSet<Description>>(); + private Map<NamedClass,Map<Integer,SortedSet<Description>>> mA = new TreeMap<NamedClass,Map<Integer,SortedSet<Description>>>(); + + // @see MathOperations.getCombos + private Map<Integer, List<List<Integer>>> combos = new HashMap<Integer, List<List<Integer>>>(); + + // refinements of the top concept ordered by length + private Map<Integer, SortedSet<Description>> topRefinements = new TreeMap<Integer, SortedSet<Description>>(); + private Map<NamedClass,Map<Integer, SortedSet<Description>>> topARefinements = new TreeMap<NamedClass,Map<Integer, SortedSet<Description>>>(); + + // cumulated refinements of top (all from length one to the specified length) + private Map<Integer, TreeSet<Description>> topRefinementsCumulative = new HashMap<Integer, TreeSet<Description>>(); + private Map<NamedClass,Map<Integer, TreeSet<Description>>> topARefinementsCumulative = new TreeMap<NamedClass,Map<Integer, TreeSet<Description>>>(); + + // app_A set of applicable properties for a given class (separate for + // object properties, boolean datatypes, and double datatypes) + private Map<NamedClass, Set<ObjectProperty>> appOP = new TreeMap<NamedClass, Set<ObjectProperty>>(); + private Map<NamedClass, Set<DatatypeProperty>> appBD = new TreeMap<NamedClass, Set<DatatypeProperty>>(); + private Map<NamedClass, 
Set<DatatypeProperty>> appDD = new TreeMap<NamedClass, Set<DatatypeProperty>>(); + + // most general applicable properties + private Map<NamedClass,Set<ObjectProperty>> mgr = new TreeMap<NamedClass,Set<ObjectProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgbd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgdd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgsd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + + // concept comparator + private ConceptComparator conceptComparator = new ConceptComparator(); + + // splits for double datatype properties in ascening order + private Map<DatatypeProperty,List<Double>> splits = new TreeMap<DatatypeProperty,List<Double>>(); + private int maxNrOfSplits = 10; + + // data structure for a simple frequent pattern matching preprocessing phase + private int frequencyThreshold = CommonConfigOptions.valueFrequencyThresholdDefault; + private Map<ObjectProperty, Map<Individual, Integer>> valueFrequency = new HashMap<ObjectProperty, Map<Individual, Integer>>(); + // data structure with identified frequent values + private Map<ObjectProperty, Set<Individual>> frequentValues = new HashMap<ObjectProperty, Set<Individual>>(); + // frequent data values + private Map<DatatypeProperty, Set<Constant>> frequentDataValues = new HashMap<DatatypeProperty, Set<Constant>>(); + private Map<DatatypeProperty, Map<Constant, Integer>> dataValueFrequency = new HashMap<DatatypeProperty, Map<Constant, Integer>>(); + private boolean useDataHasValueConstructor = false; + + // staistics + public long mComputationTimeNs = 0; + public long topComputationTimeNs = 0; + + private boolean applyAllFilter = true; + private boolean applyExistsFilter = true; + private boolean useAllConstructor = true; + private boolean useExistsConstructor = true; + private boolean useHasValueConstructor = false; + private boolean useCardinalityRestrictions = true; + private boolean useNegation = true; + private boolean useBooleanDatatypes = true; + private boolean useDoubleDatatypes = true; + @SuppressWarnings("unused") + private boolean useStringDatatypes = false; + private boolean disjointChecks = true; + private boolean instanceBasedDisjoints = true; + + private boolean dropDisjuncts = false; + + // caches for reasoner queries + private Map<Description,Map<Description,Boolean>> cachedDisjoints = new TreeMap<Description,Map<Description,Boolean>>(conceptComparator); + +// private Map<NamedClass,Map<NamedClass,Boolean>> abDisjoint = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); +// private Map<NamedClass,Map<NamedClass,Boolean>> notABDisjoint = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); +// private Map<NamedClass,Map<NamedClass,Boolean>> notABMeaningful = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); + + public FuzzyRhoDRDown(ReasonerComponent reasoningService) { +// this(reasoningService, reasoningService.getClassHierarchy(), null, true, true, true, true, true, 3, true, true, true, true, null); + this.rs = reasoningService; + this.subHierarchy = rs.getClassHierarchy(); + init(); + } + + public FuzzyRhoDRDown(ReasonerComponent reasoner, ClassHierarchy subHierarchy, Description startClass, RefinementOperatorConfigurator configurator) { + this.rs = reasoner; + this.subHierarchy = subHierarchy; + this.startClass = startClass; + useAllConstructor = configurator.getUseAllConstructor(); + useExistsConstructor = configurator.getUseExistsConstructor(); + useHasValueConstructor = 
configurator.getUseHasValueConstructor(); + useDataHasValueConstructor = configurator.getUseDataHasValueConstructor(); + frequencyThreshold = configurator.getValueFrequencyThreshold(); + useCardinalityRestrictions = configurator.getUseCardinalityRestrictions(); + cardinalityLimit = configurator.getCardinalityLimit(); + useNegation = configurator.getUseNegation(); + useBooleanDatatypes = configurator.getUseBooleanDatatypes(); + useDoubleDatatypes = configurator.getUseDoubleDatatypes(); + useStringDatatypes = configurator.getUseStringDatatypes(); + init(); + } + + // TODO constructor which takes a RhoDRDownConfigurator object; + // this should be an interface implemented e.g. by ExampleBasedROLComponentConfigurator; + // the goal is to use the configurator system while still being flexible enough to + // use one refinement operator in several learning algorithms + public FuzzyRhoDRDown(ReasonerComponent reasoningService, ClassHierarchy subHierarchy, OCELConfigurator configurator, boolean applyAllFilter, boolean applyExistsFilter, boolean useAllConstructor, + boolean useExistsConstructor, boolean useHasValueConstructor, int valueFrequencyThreshold, boolean useCardinalityRestrictions,boolean useNegation, boolean useBooleanDatatypes, boolean useDoubleDatatypes, NamedClass startClass) { + this.rs = reasoningService; + this.subHierarchy = subHierarchy; + this.applyAllFilter = applyAllFilter; + this.applyExistsFilter = applyExistsFilter; + this.useAllConstructor = useAllConstructor; + this.useExistsConstructor = useExistsConstructor; + this.useHasValueConstructor = useHasValueConstructor; + this.frequencyThreshold = valueFrequencyThreshold; + this.useCardinalityRestrictions = useCardinalityRestrictions; + cardinalityLimit = configurator.getCardinalityLimit(); + this.useDataHasValueConstructor = configurator.getUseDataHasValueConstructor(); + this.useNegation = useNegation; + this.useBooleanDatatypes = useBooleanDatatypes; + this.useDoubleDatatypes = useDoubleDatatypes; + useStringDatatypes = configurator.getUseStringDatatypes(); + instanceBasedDisjoints = configurator.getInstanceBasedDisjoints(); + if(startClass != null) { + this.startClass = startClass; + } + init(); + } + +// subHierarchy = rs.getClassHierarchy(); + public void init() { + // query reasoner for domains and ranges + // (because they are used often in the operator) + for(ObjectProperty op : rs.getObjectProperties()) { + opDomains.put(op, rs.getDomain(op)); + opRanges.put(op, rs.getRange(op)); + + if(useHasValueConstructor) { + // init + Map<Individual, Integer> opMap = new TreeMap<Individual, Integer>(); + valueFrequency.put(op, opMap); + + // sets ordered by corresponding individual (which we ignore) + Collection<SortedSet<Individual>> fillerSets = rs.getPropertyMembers(op).values(); + for(SortedSet<Individual> fillerSet : fillerSets) { + for(Individual i : fillerSet) { +// System.out.println("op " + op + " i " + i); + Integer value = opMap.get(i); + + if(value != null) { + opMap.put(i, value+1); + } else { + opMap.put(i, 1); + } + } + } + + // keep only frequent patterns + Set<Individual> frequentInds = new TreeSet<Individual>(); + for(Individual i : opMap.keySet()) { + if(opMap.get(i) >= frequencyThreshold) { + frequentInds.add(i); +// break; + } + } + frequentValues.put(op, frequentInds); + + } + + } + + for(DatatypeProperty dp : rs.getDatatypeProperties()) { + dpDomains.put(dp, rs.getDomain(dp)); + + if(useDataHasValueConstructor) { + Map<Constant, Integer> dpMap = new TreeMap<Constant, Integer>(); + dataValueFrequency.put(dp, 
dpMap); + + // sets ordered by corresponding individual (which we ignore) + Collection<SortedSet<Constant>> fillerSets = rs.getDatatypeMembers(dp).values(); + for(SortedSet<Constant> fillerSet : fillerSets) { + for(Constant i : fillerSet) { +// System.out.println("op " + op + " i " + i); + Integer value = dpMap.get(i); + + if(value != null) { + dpMap.put(i, value+1); + } else { + dpMap.put(i, 1); + } + } + } + + // keep only frequent patterns + Set<Constant> frequentInds = new TreeSet<Constant>(); + for(Constant i : dpMap.keySet()) { + if(dpMap.get(i) >= frequencyThreshold) { + logger.trace("adding value "+i+", because "+dpMap.get(i) +">="+frequencyThreshold); + frequentInds.add(i); + } + } + frequentDataValues.put(dp, frequentInds); + } + } + + // we do not need the temporary set anymore and let the + // garbage collector take care of it + valueFrequency = null; + dataValueFrequency = null; + + // compute splits for double datatype properties + for(DatatypeProperty dp : rs.getDoubleDatatypeProperties()) { + computeSplits(dp); + } + + // determine the maximum number of fillers for each role + // (up to a specified cardinality maximum) + if(useCardinalityRestrictions) { + for(ObjectProperty op : rs.getObjectProperties()) { + int maxFillers = 0; + Map<Individual,SortedSet<Individual>> opMembers = rs.getPropertyMembers(op); + for(SortedSet<Individual> inds : opMembers.values()) { + if(inds.size()>maxFillers) + maxFillers = inds.size(); + if(maxFillers >= cardinalityLimit) { + maxFillers = cardinalityLimit; + break; + } + } + maxNrOfFillers.put(op, maxFillers); + } + } + + /* + String conceptStr = "(\"http://dl-learner.org/carcinogenesis#Compound\" AND (>= 2 \"http://dl-learner.org/carcinogenesis#hasStructure\".\"http://dl-learner.org/carcinogenesis#Ar_halide\" OR ((\"http://dl-learner.org/carcinogenesis#amesTestPositive\" IS TRUE) AND >= 5 \"http://dl-learner.org/carcinogenesis#hasBond\". 
TOP)))"; + try { + NamedClass struc = new NamedClass("http://dl-learner.org/carcinogenesis#Compound"); + Description d = KBParser.parseConcept(conceptStr); + SortedSet<Description> ds = (SortedSet<Description>) refine(d,15,null,struc); + System.out.println(ds); + + Individual i = new Individual("http://dl-learner.org/carcinogenesis#d101"); + rs.instanceCheck(ds.first(), i); + + } catch (ParseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + System.exit(0); + */ + + /* + NamedClass struc = new NamedClass("http://dl-learner.org/carcinogenesis#Atom"); + ObjectProperty op = new ObjectProperty("http://dl-learner.org/carcinogenesis#hasAtom"); + ObjectSomeRestriction oar = new ObjectSomeRestriction(op,Thing.instance); + + Set<Description> ds = refine(Thing.instance,3,null,struc); +// Set<Description> improper = new HashSet<Description>(); + for(Description d : ds) { +// if(rs.subsumes(d, struc)) { +// improper.add(d); + System.out.println(d); +// } + } + System.out.println(ds.size()); +// System.out.println(improper.size()); + System.exit(0); + */ + + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.refinement.RefinementOperator#refine(org.dllearner.core.owl.Description) + */ + @Override + public Set<Description> refine(Description concept) { + throw new RuntimeException(); + } + + @Override + public Set<Description> refine(Description description, int maxLength) { + // check that maxLength is valid + if(maxLength < description.getLength()) { + throw new Error("length has to be at least description length (description: " + description + ", max length: " + maxLength + ")"); + } + return refine(description, maxLength, null, startClass); + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.refinement.RefinementOperator#refine(org.dllearner.core.owl.Description, int, java.util.List) + */ + @Override + public Set<Description> refine(Description description, int maxLength, + List<Description> knownRefinements) { + return refine(description, maxLength, knownRefinements, startClass); + } + + @SuppressWarnings({"unchecked"}) + public Set<Description> refine(Description description, int maxLength, + List<Description> knownRefinements, Description currDomain) { + +// System.out.println("|- " + description + " " + currDomain + " " + maxLength); + + // actions needing to be performed if this is the first time the + // current domain is used + if(!(currDomain instanceof Thing) && !topARefinementsLength.containsKey(currDomain)) + topARefinementsLength.put((NamedClass)currDomain, 0); + + // check whether using list or set makes more sense + // here; and whether HashSet or TreeSet should be used + // => TreeSet because duplicates are possible + Set<Description> refinements = new TreeSet<Description>(conceptComparator); + + // used as temporary variable + Set<Description> tmp = new HashSet<Description>(); + + if(description instanceof Thing) { + // extends top refinements if necessary + if(currDomain instanceof Thing) { + if(maxLength>topRefinementsLength) + computeTopRefinements(maxLength); + refinements = (TreeSet<Description>) topRefinementsCumulative.get(maxLength).clone(); + } else { + if(maxLength>topARefinementsLength.get(currDomain)) { + computeTopRefinements(maxLength, (NamedClass) currDomain); + } + refinements = (TreeSet<Description>) topARefinementsCumulative.get(currDomain).get(maxLength).clone(); + } +// refinements.addAll(subHierarchy.getMoreSpecialConcepts(description)); + } else if(description instanceof Nothing) { + // cannot be further refined + } 
else if(description instanceof NamedClass) { + refinements.addAll(subHierarchy.getSubClasses(description)); + refinements.remove(new Nothing()); + } else if (description instanceof Negation && description.getChild(0) instanceof NamedClass) { + + tmp = subHierarchy.getSuperClasses(description.getChild(0)); + + for(Description c : tmp) { + if(!(c instanceof Thing)) + refinements.add(new Negation(c)); + } + + } else if (description instanceof Intersection) { + + // refine one of the elements + for(Description child : description.getChildren()) { + + // refine the child; the new max length is the current max length minus + // the currently considered concept plus the length of the child + // TODO: add better explanation + tmp = refine(child, maxLength - description.getLength()+child.getLength(),null,currDomain); + + // create new intersection + for(Description c : tmp) { + List<Description> newChildren = (List<Description>)((LinkedList<Description>)description.getChildren()).clone(); + newChildren.add(c); + newChildren.remove(child); + Intersection mc = new Intersection(newChildren); + + // clean concept and transform it to ordered negation normal form + // (non-recursive variant because only depth 1 was modified) + ConceptTransformation.cleanConceptNonRecursive(mc); + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(mc, conceptComparator); + + // check whether the intersection is OK (sanity checks), then add it + if(checkIntersection(mc)) + refinements.add(mc); + } + + } + + } else if (description instanceof Union) { + // refine one of the elements + for(Description child : description.getChildren()) { + +// System.out.println("union child: " + child + " " + maxLength + " " + description.getLength() + " " + child.getLength()); + + // refine child + tmp = refine(child, maxLength - description.getLength()+child.getLength(),null,currDomain); + + // construct intersection (see above) + for(Description c : tmp) { + List<Description> newChildren = new LinkedList<Description>(description.getChildren()); + newChildren.remove(child); + newChildren.add(c); + Union md = new Union(newChildren); + + // transform to ordered negation normal form + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(md, conceptComparator); + // note that we do not have to call clean here because a disjunction will + // never be nested in another disjunction in this operator + + refinements.add(md); + } + + } + + // if enabled, we can remove elements of the disjunction + if(dropDisjuncts) { + // A1 OR A2 => {A1,A2} + if(description.getChildren().size() == 2) { + refinements.add(description.getChild(0)); + refinements.add(description.getChild(1)); + } else { + // copy children list and remove a different element in each turn + for(int i=0; i<description.getChildren().size(); i++) { + List<Description> newChildren = new LinkedList<Description>(description.getChildren()); + newChildren.remove(i); + Union md = new Union(newChildren); + refinements.add(md); + } + } + } + + } else if (description instanceof ObjectSomeRestriction) { + ObjectPropertyExpression role = ((ObjectQuantorRestriction)description).getRole(); + Description range = opRanges.get(role); + + // rule 1: EXISTS r.D => EXISTS r.E + tmp = refine(description.getChild(0), maxLength-2, null, range); + + for(Description c : tmp) + refinements.add(new ObjectSomeRestriction(((ObjectQuantorRestriction)description).getRole(),c)); + + // rule 2: EXISTS r.D => EXISTS s.D or EXISTS r^-1.D => EXISTS s^-1.D + // currently inverse roles are 
not supported + ObjectProperty ar = (ObjectProperty) role; + Set<ObjectProperty> moreSpecialRoles = rs.getSubProperties(ar); + for(ObjectProperty moreSpecialRole : moreSpecialRoles) + refinements.add(new ObjectSomeRestriction(moreSpecialRole, description.getChild(0))); + + // rule 3: EXISTS r.D => >= 2 r.D + // (length increases by 1 so we have to check whether max length is sufficient) + if(useCardinalityRestrictions) { + if(maxLength > description.getLength() && maxNrOfFillers.get(ar)>1) { + ObjectMinCardinalityRestriction min = new ObjectMinCardinalityRestriction(2,role,description.getChild(0)); + refinements.add(min); + } + } + + // rule 4: EXISTS r.TOP => EXISTS r.{value} + if(useHasValueConstructor && description.getChild(0) instanceof Thing) { + // watch out for frequent patterns + Set<Individual> frequentInds = frequentValues.get(role); + if(frequentInds != null) { + for(Individual ind : frequentInds) { + ObjectValueRestriction ovr = new ObjectValueRestriction((ObjectProperty)role, ind); + refinements.add(ovr); + } + } + } + + } else if (description instanceof ObjectAllRestriction) { + ObjectPropertyExpression role = ((ObjectQuantorRestriction)description).getRole(); + Description range = opRanges.get(role); + + // rule 1: ALL r.D => ALL r.E + tmp = refine(description.getChild(0), maxLength-2, null, range); + + for(Description c : tmp) { + refinements.add(new ObjectAllRestriction(((ObjectQuantorRestriction)description).getRole(),c)); + } + + // rule 2: ALL r.D => ALL r.BOTTOM if D is a most specific atomic concept + if(description.getChild(0) instanceof NamedClass && tmp.size()==0) { + refinements.add(new ObjectAllRestriction(((ObjectQuantorRestriction)description).getRole(),new Nothing())); + } + + // rule 3: ALL r.D => ALL s.D or ALL r^-1.D => ALL s^-1.D + // currently inverse roles are not supported + ObjectProperty ar = (ObjectProperty) role; + Set<ObjectProperty> moreSpecialRoles = rs.getSubProperties(ar); + for(ObjectProperty moreSpecialRole : moreSpecialRoles) { + refinements.add(new ObjectAllRestriction(moreSpecialRole, description.getChild(0))); + } + + // rule 4: ALL r.D => <= (maxFillers-1) r.D + // (length increases by 1 so we have to check whether max length is sufficient) + // => commented out because this is acutally not a downward refinement +// if(useCardinalityRestrictions) { +// if(maxLength > description.getLength() && maxNrOfFillers.get(ar)>1) { +// ObjectMaxCardinalityRestriction max = new ObjectMaxCardinalityRestriction(maxNrOfFillers.get(ar)-1,role,description.getChild(0)); +// refinements.add(max); +// } +// } + } else if (description instanceof ObjectCardinalityRestriction) { + ObjectPropertyExpression role = ((ObjectCardinalityRestriction)description).getRole(); + Description range = opRanges.get(role); + int number = ((ObjectCardinalityRestriction)description).getCardinality(); + if(description instanceof ObjectMaxCardinalityRestriction) { + // rule 1: <= x r.C => <= x r.D + tmp = refine(description.getChild(0), maxLength-3, null, range); + + for(Description d : tmp) { + refinements.add(new ObjectMaxCardinalityRestriction(number,role,d)); + } + + // rule 2: <= x r.C => <= (x-1) r.C + ObjectMaxCardinalityRestriction max = (ObjectMaxCardinalityRestriction) description; +// int number = max.getNumber(); + if(number > 1) + refinements.add(new ObjectMaxCardinalityRestriction(number-1,max.getRole(),max.getChild(0))); + + } else if(description instanceof ObjectMinCardinalityRestriction) { + tmp = refine(description.getChild(0), maxLength-3, null, range); + + 
for(Description d : tmp) { + refinements.add(new ObjectMinCardinalityRestriction(number,role,d)); + } + + // >= x r.C => >= (x+1) r.C + ObjectMinCardinalityRestriction min = (ObjectMinCardinalityRestriction) description; +// int number = min.getNumber(); + if(number < maxNrOfFillers.get(min.getRole())) + refinements.add(new ObjectMinCardinalityRestriction(number+1,min.getRole(),min.getChild(0))); + } + } else if (description instanceof DatatypeSomeRestriction) { + + DatatypeSomeRestriction dsr = (DatatypeSomeRestriction) description; + DatatypeProperty dp = (DatatypeProperty) dsr.getRestrictedPropertyExpression(); + DataRange dr = dsr.getDataRange(); + if(dr instanceof DoubleMaxValue) { + double value = ((DoubleMaxValue)dr).getValue(); + // find out which split value was used + int splitIndex = splits.get(dp).lastIndexOf(value); + if(splitIndex == -1) + throw new Error("split error"); + int newSplitIndex = splitIndex - 1; + if(newSplitIndex >= 0) { + DoubleMaxValue max = new DoubleMaxValue(splits.get(dp).get(newSplitIndex)); + DatatypeSomeRestriction newDSR = new DatatypeSomeRestriction(dp,max); + refinements.add(newDSR); +// System.out.println(description + " => " + newDSR); + } + } else if(dr instanceof DoubleMinValue) { + double value = ((DoubleMinValue)dr).getValue(); + // find out which split value was used + int splitIndex = splits.get(dp).lastIndexOf(value); + if(splitIndex == -1) + throw new Error("split error"); + int newSplitIndex = splitIndex + 1; + if(newSplitIndex < splits.get(dp).size()) { + DoubleMinValue min = new DoubleMinValue(splits.get(dp).get(newSplitIndex)); + DatatypeSomeRestriction newDSR = new DatatypeSomeRestriction(dp,min); + refinements.add(newDSR); + } + } + } else if (description instanceof StringValueRestriction) { + StringValueRestriction svr = (StringValueRestriction) description; + DatatypeProperty dp = svr.getRestrictedPropertyExpression(); + Set<DatatypeProperty> subDPs = rs.getSubProperties(dp); + for(DatatypeProperty subDP : subDPs) { + refinements.add(new StringValueRestriction(subDP, svr.getStringValue())); + } + } + + // if a refinement is not Bottom, Top, ALL r.Bottom a refinement of top can be appended + if(!(description instanceof Thing) && !(description instanceof Nothing) + && !(description instanceof ObjectAllRestriction && description.getChild(0) instanceof Nothing)) { + // -1 because of the AND symbol which is appended + int topRefLength = maxLength - description.getLength() - 1; + + // maybe we have to compute new top refinements here + if(currDomain instanceof Thing) { + if(topRefLength > topRefinementsLength) + computeTopRefinements(topRefLength); + } else if(topRefLength > topARefinementsLength.get(currDomain)) + computeTopRefinements(topRefLength,(NamedClass)currDomain); + + if(topRefLength>0) { + Set<Description> topRefs; + if(currDomain instanceof Thing) + topRefs = topRefinementsCumulative.get(topRefLength); + else + topRefs = topARefinementsCumulative.get(currDomain).get(topRefLength); + + for(Description c : topRefs) { + // true if refinement should be skipped due to filters, + // false otherwise + boolean skip = false; + + // if a refinement of of the form ALL r, we check whether ALL r + // does not occur already + if(applyAllFilter) { + if(c instanceof ObjectAllRestriction) { + for(Description child : description.getChildren()) { + if(child instanceof ObjectAllRestriction) { + ObjectPropertyExpression r1 = ((ObjectAllRestriction)c).getRole(); + ObjectPropertyExpression r2 = ((ObjectAllRestriction)child).getRole(); + 
if(r1.toString().equals(r2.toString())) + skip = true; + } + } + } + } + + // check for double datatype properties + /* + if(c instanceof DatatypeSomeRestriction && + description instanceof DatatypeSomeRestriction) { + DataRange dr = ((DatatypeSomeRestriction)c).getDataRange(); + DataRange dr2 = ((DatatypeSomeRestriction)description).getDataRange(); + // it does not make sense to have statements like height >= 1.8 AND height >= 1.7 + if((dr instanceof DoubleMaxValue && dr2 instanceof DoubleMaxValue) + ||(dr instanceof DoubleMinValue && dr2 instanceof DoubleMinValue)) + skip = true; + }*/ + + // perform a disjointness check when named classes are added; + // this can avoid a lot of superfluous computation in the algorithm e.g. + // when A1 looks good, so many refinements of the form (A1 OR (A2 AND A3)) + // are generated which are all equal to A1 due to disjointness of A2 and A3 + if(disjointChecks && c instanceof NamedClass && description instanceof NamedClass && isDisjoint(description, c)) { + skip = true; +// System.out.println(c + " ignored when refining " + description); + } + + if(!skip) { + Intersection mc = new Intersection(); + mc.addChild(description); + mc.addChild(c); + + // clean and transform to ordered negation normal form + ConceptTransformation.cleanConceptNonRecursive(mc); + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(mc, conceptComparator); + + // last check before intersection is added + if(checkIntersection(mc)) + refinements.add(mc); + } + } + } + } + +// for(Description refinement : refinements) { +// if((refinement instanceof Intersection || refinement instanceof Union) && refinement.getChildren().size()<2) { +// System.out.println(description + " " + refinement + " " + currDomain + " " + maxLength); +// System.exit(0); +// } +// } + + return refinements; + } + + // when a child of an intersection is refined and reintegrated into the + // intersection, we can perform some sanity checks; + // method returns true if everything is OK and false otherwise + // TODO: can be implemented more efficiently if the newly added child + // is given as parameter + public static boolean checkIntersection(Intersection intersection) { + // rule 1: max. 
restrictions at most once + boolean maxDoubleOccurence = false; + // rule 2: min restrictions at most once + boolean minDoubleOccurence = false; + // rule 3: no double occurences of boolean datatypes + TreeSet<DatatypeProperty> occuredDP = new TreeSet<DatatypeProperty>(); + // rule 4: no double occurences of hasValue restrictions + TreeSet<ObjectProperty> occuredVR = new TreeSet<ObjectProperty>(); + + for(Description child : intersection.getChildren()) { + if(child instanceof DatatypeSomeRestriction) { + DataRange dr = ((DatatypeSomeRestriction)child).getDataRange(); + if(dr instanceof DoubleMaxValue) { + if(maxDoubleOccurence) + return false; + else + maxDoubleOccurence = true; + } else if(dr instanceof DoubleMinValue) { + if(minDoubleOccurence) + return false; + else + minDoubleOccurence = true; + } + } else if(child instanceof BooleanValueRestriction) { + DatatypeProperty dp = (DatatypeProperty) ((BooleanValueRestriction)child).getRestrictedPropertyExpression(); +// System.out.println("dp: " + dp); + // return false if the boolean property exists already + if(!occuredDP.add(dp)) + return false; + } else if(child instanceof ObjectValueRestriction) { + ObjectProperty op = (ObjectProperty) ((ObjectValueRestriction)child).getRestrictedPropertyExpression(); + if(!occuredVR.add(op)) + return false; + } +// System.out.println(child.getClass()); + } + return true; + } + + /** + * By default, the operator does not specialize e.g. (A or B) to A, because + * it only guarantees weak completeness. Under certain circumstances, e.g. + * refinement of a fixed given concept, it can be useful to allow such + * refinements, which can be done by passing the parameter true to this method. + * @param dropDisjuncts Whether to remove disjuncts in refinement process. + */ + public void setDropDisjuncts(boolean dropDisjuncts) { + this.dropDisjuncts = dropDisjuncts; + } + + private void computeTopRefinements(int maxLength) { + computeTopRefinements(maxLength, null); + } + + private void computeTopRefinements(int maxLength, NamedClass domain) { + long topComputationTimeStartNs = System.nanoTime(); + + if(domain == null && m.size() == 0) + computeM(); + + if(domain != null && !mA.containsKey(domain)) + computeM(domain); + + int refinementsLength; + + if(domain == null) { + refinementsLength = topRefinementsLength; + } else { + if(!topARefinementsLength.containsKey(domain)) + topARefinementsLength.put(domain,0); + + refinementsLength = topARefinementsLength.get(domain); + } + + // compute all possible combinations of the disjunction + for(int i = refinementsLength+1; i <= maxLength; i++) { + combos.put(i,MathOperations.getCombos(i, mMaxLength)); + + // initialise the refinements with empty sets + if(domain == null) { + topRefinements.put(i, new TreeSet<Description>(conceptComparator)); + } else { + if(!topARefinements.containsKey(domain)) + topARefinements.put(domain, new TreeMap<Integer,SortedSet<Description>>()); + topARefinements.get(domain).put(i, new TreeSet<Description>(conceptComparator)); + } + + for(List<Integer> combo : combos.get(i)) { + + // combination is a single number => try to use M + if(combo.size()==1) { + // note we cannot use "put" instead of "addAll" because there + // can be several combos for one length + if(domain == null) + topRefinements.get(i).addAll(m.get(i)); + else + topARefinements.get(domain).get(i).addAll(mA.get(domain).get(i)); + // combinations has several numbers => generate disjunct + } else { + + // check whether the combination makes sense, i.e. 
whether + // all lengths mentioned in it have corresponding elements + // e.g. when negation is deactivated there won't be elements of + // length 2 in M + boolean validCombo = true; + for(Integer j : combo) { + if((domain == null && m.get(j).size()==0) || + (domain != null && mA.get(domain).get(j).size()==0)) + validCombo = false; + } + + if(validCombo) { + + SortedSet<Union> baseSet = new TreeSet<Union>(conceptComparator); + for(Integer j : combo) { + if(domain == null) + baseSet = MathOperations.incCrossProduct(baseSet, m.get(j)); + else + baseSet = MathOperations.incCrossProduct(baseSet, mA.get(domain).get(j)); + } + + // convert all concepts in ordered negation normal form + for(Description concept : baseSet) { + ConceptTransformation.transformToOrderedForm(concept, conceptComparator); + } + + // apply the exists filter (throwing out all refinements with + // double \exists r for any r) + // TODO: similar filtering can be done for boolean datatype + // properties + if(applyExistsFilter) { + Iterator<Union> it = baseSet.iterator(); + while(it.hasNext()) { + if(MathOperations.containsDoubleObjectSomeRestriction(it.next())) + it.remove(); + } + } + + // add computed refinements + if(domain == null) + topRefinements.get(i).addAll(baseSet); + else + topARefinements.get(domain).get(i).addAll(baseSet); + + } + } + } + + // create cumulative versions of refinements such that they can + // be accessed easily + TreeSet<Description> cumulativeRefinements = new TreeSet<Description>(conceptComparator); + for(int j=1; j<=i; j++) { + if(domain == null) { + cumulativeRefinements.addAll(topRefinements.get(j)); + } else { + cumulativeRefinements.addAll(topARefinements.get(domain).get(j)); + } + } + + if(domain == null) { + topRefinementsCumulative.put(i, cumulativeRefinements); + } else { + if(!topARefinementsCumulative.containsKey(domain)) + topARefinementsCumulative.put(domain, new TreeMap<Integer, TreeSet<Description>>()); + topARefinementsCumulative.get(domain).put(i, cumulativeRefinements); + } + } + + // register new top refinements length + if(domain == null) + topRefinementsLength = maxLength; + else + topARefinementsLength.put(domain,maxLength); + + topComputationTimeNs += System.nanoTime() - topComputationTimeStartNs; + } + + // compute M_\top + private void computeM() { + long mComputationTimeStartNs = System.nanoTime(); + + // initialise all possible lengths (1 to 3) + for(int i=1; i<=mMaxLength; i++) { + m.put(i, new TreeSet<Description>(conceptComparator)); + } + + SortedSet<Description> m1 = subHierarchy.getSubClasses(new Thing()); + m.put(1,m1); + + SortedSet<Description> m2 = new TreeSet<Description>(conceptComparator); + if(useNegation) { + Set<Description> m2tmp = subHierarchy.getSuperClasses(new Nothing()); + for(Description c : m2tmp) { + if(!(c instanceof Thing)) { + m2.add(new Negation(c)); + } + } + } + + // boolean datatypes, e.g. 
testPositive = true + if(useBooleanDatatypes) { + Set<DatatypeProperty> booleanDPs = rs.getBooleanDatatypeProperties(); + for(DatatypeProperty dp : booleanDPs) { + m2.add(new BooleanValueRestriction(dp,true)); + m2.add(new BooleanValueRestriction(dp,false)); + } + } + m.put(2,m2); + + SortedSet<Description> m3 = new TreeSet<Description>(conceptComparator); + if(useExistsConstructor) { + // only uses most general roles + for(ObjectProperty r : rs.getMostGeneralProperties()) { + m3.add(new ObjectSomeRestriction(r, new Thing())); + } + } + + if(useAllConstructor) { + // we allow \forall r.\top here because otherwise the operator + // becomes too difficult to manage due to dependencies between + // M_A and M_A' where A'=ran(r) + for(ObjectProperty r : rs.getMostGeneralProperties()) { + m3.add(new ObjectAllRestriction(r, new Thing())); + } + } + + if(useDoubleDatatypes) { + Set<DatatypeProperty> doubleDPs = rs.getDoubleDatatypeProperties(); + for(DatatypeProperty dp : doubleDPs) { + if(splits.get(dp).size()>0) { + DoubleMaxValue max = new DoubleMaxValue(splits.get(dp).get(splits.get(dp).size()-1)); + DoubleMinValue min = new DoubleMinValue(splits.get(dp).get(0)); + m3.add(new DatatypeSomeRestriction(dp,max)); + m3.add(new DatatypeSomeRestriction(dp,min)); + } + } + } + + if(useDataHasValueConstructor) { + Set<DatatypeProperty> stringDPs = rs.getStringDatatypeProperties(); + for(DatatypeProperty dp : stringDPs) { + // loop over frequent values + Set<Constant> freqValues = frequentDataValues.get(dp); + for(Constant c : freqValues) { + m3.add(new StringValueRestriction(dp, c.getLiteral())); + } + } + } + + m.put(3,m3); + + SortedSet<Description> m4 = new TreeSet<Description>(conceptComparator); + if(useCardinalityRestrictions) { + for(ObjectProperty r : rs.getMostGeneralProperties()) { + int maxFillers = maxNrOfFillers.get(r); + // zero fillers: <= -1 r.C does not make sense + // one filler: <= 0 r.C is equivalent to NOT EXISTS r.C, + // but we still keep it, because ALL r.NOT C may be difficult to reach + if(maxFillers > 0) + m4.add(new ObjectMaxCardinalityRestriction(maxFillers-1, r, new Thing())); + } + } + m.put(4,m4); + + mComputationTimeNs += System.nanoTime() - mComputationTimeStartNs; + } + + // computation of the set M_A + // a major difference compared to the ILP 2007 \rho operator is that + // M is finite and contains elements of length (currently) at most 3 + private void computeM(NamedClass nc) { + long mComputationTimeStartNs = System.nanoTime(); + +// System.out.println(nc); + + mA.put(nc, new TreeMap<Integer,SortedSet<Description>>()); + // initialise all possible lengths (1 to 3) + for(int i=1; i<=mMaxLength; i++) { + mA.get(nc).put(i, new TreeSet<Description>(conceptComparator)); + } + + // incomplete, prior implementation +// SortedSet<Description> m1 = subHierarchy.getSubClasses(nc); +// mA.get(nc).put(1,m1); + + // most general classes, which are not disjoint with nc and provide real refinement + SortedSet<Description> m1 = getClassCandidates(nc); + mA.get(nc).put(1,m1); + + // most specific negated classes, which are not disjoint with nc + SortedSet<Description> m2 = new TreeSet<Description>(); + if(useNegation) { + m2 = getNegClassCandidates(nc); + mA.get(nc).put(2,m2); + } + +// System.out.println("m1 " + "(" + nc + "): " + m1); +// ... [truncated message content] |
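The computeM()/computeTopRefinements() code above builds the sets M (atomic classes, negations and boolean restrictions, quantified restrictions, cardinality restrictions) up to length mMaxLength and then assembles refinements of the top concept of a requested length by enumerating length combinations and taking incremental cross products. A minimal, self-contained sketch of that idea over plain strings (the class and helper names below are stand-ins, not the DL-Learner Description/Union/MathOperations classes; the real operator additionally cleans the results, converts them to ordered negation normal form, applies the exists filter and accounts for the exact length bookkeeping of disjunctions):

import java.util.*;

public class TopRefinementSketch {

    // all multisets of integers in [1, maxElem] summing to length (cf. MathOperations.getCombos)
    static List<List<Integer>> combos(int length, int maxElem) {
        List<List<Integer>> result = new ArrayList<>();
        collect(length, Math.min(length, maxElem), new ArrayDeque<>(), result);
        return result;
    }

    private static void collect(int remaining, int maxNext, Deque<Integer> current, List<List<Integer>> out) {
        if (remaining == 0) {
            out.add(new ArrayList<>(current));
            return;
        }
        // parts are generated in non-increasing order to avoid duplicate combinations
        for (int i = Math.min(remaining, maxNext); i >= 1; i--) {
            current.addLast(i);
            collect(remaining - i, i, current, out);
            current.removeLast();
        }
    }

    // incremental cross product: every partial disjunction gets one more disjunct from the next set
    static Set<String> incCrossProduct(Set<String> base, Set<String> next) {
        if (base.isEmpty()) return new TreeSet<>(next);
        Set<String> result = new TreeSet<>();
        for (String b : base)
            for (String n : next)
                result.add(b + " OR " + n);
        return result;
    }

    public static void main(String[] args) {
        // small stand-ins for M_1..M_3 (atomic classes, negated classes, restrictions)
        Map<Integer, Set<String>> m = new HashMap<>();
        m.put(1, new TreeSet<>(Arrays.asList("A1", "A2")));
        m.put(2, new TreeSet<>(Arrays.asList("NOT A3")));
        m.put(3, new TreeSet<>(Arrays.asList("EXISTS r.TOP")));

        for (List<Integer> combo : combos(4, 3)) {            // candidate refinements of length 4
            Set<String> disjunctions = new TreeSet<>();
            for (int len : combo) {
                if (m.get(len).isEmpty()) { disjunctions.clear(); break; }  // invalid combo, cf. validCombo check
                disjunctions = incCrossProduct(disjunctions, m.get(len));
            }
            System.out.println(combo + " -> " + disjunctions);
        }
    }
}

Running the sketch prints one candidate set per valid length combination; in the operator these sets are then filtered, normalised and cached in topRefinementsCumulative so that later calls for the same or smaller lengths are answered without recomputation.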
From: <ji...@us...> - 2011-06-09 17:06:02
|
Revision: 2859 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2859&view=rev Author: jialva Date: 2011-06-09 17:05:56 +0000 (Thu, 09 Jun 2011) Log Message: ----------- New update of fuzzyDL-Learner Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-06-09 16:12:18 UTC (rev 2858) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-06-09 17:05:56 UTC (rev 2859) @@ -209,7 +209,7 @@ // TODO remove, just for testing purposes FileWriter fstream; try { - fstream = new FileWriter("../examples/fuzzydll/testOut_TriRecEq.log"); + fstream = new FileWriter("../examples/fuzzydll/testOut_v2.0_noise30_10000.log"); out = new PrintWriter(fstream); } catch (IOException e) { // TODO Auto-generated catch block @@ -397,11 +397,11 @@ // TODO just for testing purposes counter++; - System.out.println(counter + " next node: " + nextNode); + out.println(counter + " next node: " + nextNode); for(Description refinement : refinements) { - System.out.println("refinement: " + refinement); + out.println("refinement: " + refinement); } - System.out.println(); + out.println(); while(refinements.size() != 0) { // pick element from set @@ -471,12 +471,19 @@ } private FuzzyOENode getNextNodeToExpand() { + // TODO: remove, just for testing purposes +// Iterator<FuzzyOENode> itx = nodes.descendingIterator(); +// while(itx.hasNext()) { +// FuzzyOENode node = itx.next(); +// out.println(node.getAccuracy() + " " + node.getHorizontalExpansion() + " " + node.getDescription().getLength() + " " + node); +// } // we expand the best node of those, which have not achieved 100% accuracy // already and have a horizontal expansion equal to their length // (rationale: further extension is likely to add irrelevant syntactical constructs) Iterator<FuzzyOENode> it = nodes.descendingIterator(); while(it.hasNext()) { FuzzyOENode node = it.next(); + // TODO: in a fuzzy environment ... has sense to return nodes with accuracy < 1 ??? 
if(node.getAccuracy() < 1.0 || node.getHorizontalExpansion() < node.getDescription().getLength()) { return node; } Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-06-09 16:12:18 UTC (rev 2858) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-06-09 17:05:56 UTC (rev 2859) @@ -43,6 +43,21 @@ */ public class FuzzyDLLTest_Trains { + String[] posEx = { + "http://www.example.com/fuzzyTrains.owl#east1", + "http://www.example.com/fuzzyTrains.owl#east2", + "http://www.example.com/fuzzyTrains.owl#east3", + "http://www.example.com/fuzzyTrains.owl#east4", + "http://www.example.com/fuzzyTrains.owl#east5" + }; + String[] negEx = { + "http://www.example.com/fuzzyTrains.owl#west6", + "http://www.example.com/fuzzyTrains.owl#west7", + "http://www.example.com/fuzzyTrains.owl#west8", + "http://www.example.com/fuzzyTrains.owl#west9", + "http://www.example.com/fuzzyTrains.owl#west0" + }; + // String[] posEx = { // "http://www.example.com/fuzzyTrains.owl#east1", // "http://www.example.com/fuzzyTrains.owl#east2" @@ -52,12 +67,12 @@ // "http://www.example.com/fuzzyTrains.owl#west7" // }; - String[] posEx = { - "http://www.example.com/fuzzyTrains.owl#carPositive" - }; - String[] negEx = { - "http://www.example.com/fuzzyTrains.owl#carNegative" - }; +// String[] posEx = { +// "http://www.example.com/fuzzyTrains.owl#carPositive" +// }; +// String[] negEx = { +// "http://www.example.com/fuzzyTrains.owl#carNegative" +// }; public Description learn() throws LearningProblemUnsupportedException, IOException, ComponentInitException { @@ -87,7 +102,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file", null, "../examples/fuzzydll/fuzzyTrains_simplestExample_fuzzyLoad.owl")); + ks.getConfigurator().setUrl(new URL("file", null, "../examples/fuzzydll/fuzzyTrains_v2.0.owl")); ks.init(); // ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); @@ -111,13 +126,13 @@ // Set<String> aaaaaaaaaa = new TreeSet<String>(); // aaaaaaaaaa.add("Nothing"); // fc.getConfigurator().setIgnoredConcepts(aaaaaaaaaa); - fc.getConfigurator().setMaxClassDescriptionTests(1000); + fc.getConfigurator().setMaxClassDescriptionTests(10000); fc.getConfigurator().setMaxExecutionTimeInSeconds(0); fc.getConfigurator().setUseDoubleDatatypes(false); fc.getConfigurator().setUseCardinalityRestrictions(false); fc.getConfigurator().setWriteSearchTree(true); fc.getConfigurator().setSearchTreeFile("log/searchTreeFuzzy.txt"); - fc.getConfigurator().setNoisePercentage(100); + fc.getConfigurator().setNoisePercentage(30); fc.init(); fc.start(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
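The change to getNextNodeToExpand() above only redirects the debug output, but the selection criterion itself is worth spelling out: candidate nodes are kept in a set ordered by a heuristic score, and the algorithm walks them best-first, skipping nodes that already reach 100% accuracy and are fully expanded up to their own description length. A minimal sketch with a hypothetical Node class (not the actual FuzzyOENode, which carries more state):

import java.util.*;

public class NextNodeSketch {

    static class Node {
        final double accuracy;            // in [0,1]
        final int horizontalExpansion;
        final int descriptionLength;
        Node(double accuracy, int horizontalExpansion, int descriptionLength) {
            this.accuracy = accuracy;
            this.horizontalExpansion = horizontalExpansion;
            this.descriptionLength = descriptionLength;
        }
        @Override
        public String toString() {
            return String.format("acc=%.2f he=%d len=%d", accuracy, horizontalExpansion, descriptionLength);
        }
    }

    static Node getNextNodeToExpand(NavigableSet<Node> nodes) {
        // descending iterator = best node first (the set is assumed ordered by a heuristic score)
        Iterator<Node> it = nodes.descendingIterator();
        while (it.hasNext()) {
            Node node = it.next();
            // skip nodes that are already perfect and fully expanded up to their own length
            if (node.accuracy < 1.0 || node.horizontalExpansion < node.descriptionLength) {
                return node;
            }
        }
        return null; // nothing left to expand
    }

    public static void main(String[] args) {
        Comparator<Node> byAccuracy = Comparator.comparingDouble(n -> n.accuracy);
        NavigableSet<Node> nodes = new TreeSet<>(byAccuracy);
        nodes.add(new Node(1.0, 5, 5));   // perfect and fully expanded -> skipped
        nodes.add(new Node(0.8, 3, 4));   // best remaining expandable node
        nodes.add(new Node(0.6, 2, 2));
        System.out.println("next node: " + getNextNodeToExpand(nodes));
    }
}

As the TODO added in this commit notes, it is still an open question whether the accuracy < 1.0 cut-off is the right criterion in a fuzzy setting.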
From: <lor...@us...> - 2011-08-04 08:38:29
|
Revision: 2985 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2985&view=rev Author: lorenz_b Date: 2011-08-04 08:38:23 +0000 (Thu, 04 Aug 2011) Log Message: ----------- Added Test class. Continued learning algorithms. Fixed config helper. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyRangeAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexivePropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/config/ConfigHelper.java Added Paths: ----------- trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalPropertyAxiomLearner.java 2011-08-04 08:35:37 UTC (rev 2984) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalPropertyAxiomLearner.java 2011-08-04 08:38:23 UTC (rev 2985) @@ -28,7 +28,7 @@ public class FunctionalPropertyAxiomLearner extends Component implements AxiomLearningAlgorithm { -private static final Logger logger = LoggerFactory.getLogger(TransitivePropertyAxiomLearner.class); + private static final Logger logger = LoggerFactory.getLogger(FunctionalPropertyAxiomLearner.class); @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; @@ -87,19 +87,26 @@ logger.info("Property is already declared as functional in knowledge base."); } - //get fraction of instances s with <s p o> also exists <o p s> - query = "SELECT (COUNT(?s)) AS ?all ,(COUNT(?o1)) AS ?functional WHERE {?s <%s> ?o. OPTIONAL{?o <%s> ?s. ?o <%s> ?o1}}"; - query = query.replace("%s", propertyToDescribe.getURI().toString()); + //get number of instances of s with <s p o> + query = String.format("SELECT (COUNT(DISTINCT ?s)) AS ?all WHERE {?s <%s> ?o.}", propertyToDescribe.getName()); ResultSet rs = executeQuery(query); QuerySolution qs; + int all = 1; while(rs.hasNext()){ qs = rs.next(); - int all = qs.getLiteral("all").getInt(); - int symmetric = qs.getLiteral("functional").getInt(); - double frac = symmetric / (double)all; - currentlyBestAxioms.add(new EvaluatedAxiom(new FunctionalObjectPropertyAxiom(propertyToDescribe), new AxiomScore(frac))); + all = qs.getLiteral("all").getInt(); } - + //get number of instances of s with <s p o> <s p o1> where o != o1 + query = "SELECT (COUNT(DISTINCT ?s)) AS ?notfunctional WHERE {?s <%s> ?o. ?s <%s> ?o1. 
FILTER(?o != ?o1) }"; + query = query.replace("%s", propertyToDescribe.getURI().toString()); + rs = executeQuery(query); + int notFunctional = 1; + while(rs.hasNext()){ + qs = rs.next(); + notFunctional = qs.getLiteral("notfunctional").getInt(); + } + double frac = (all - notFunctional) / (double)all; + currentlyBestAxioms.add(new EvaluatedAxiom(new FunctionalObjectPropertyAxiom(propertyToDescribe), new AxiomScore(frac))); logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyDomainAxiomLearner.java 2011-08-04 08:35:37 UTC (rev 2984) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyDomainAxiomLearner.java 2011-08-04 08:38:23 UTC (rev 2985) @@ -18,7 +18,6 @@ import org.dllearner.core.Component; import org.dllearner.core.ComponentInitException; import org.dllearner.core.EvaluatedAxiom; -import org.dllearner.core.config.ConfigHelper; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.IntegerEditor; import org.dllearner.core.config.ObjectPropertyEditor; @@ -122,7 +121,7 @@ public List<EvaluatedAxiom> getCurrentlyBestEvaluatedAxioms(int nrOfAxioms) { int max = Math.min(currentlyBestAxioms.size(), nrOfAxioms); - List<EvaluatedAxiom> bestAxioms = currentlyBestAxioms.subList(0, max-1); + List<EvaluatedAxiom> bestAxioms = currentlyBestAxioms.subList(0, max); return bestAxioms; } @@ -229,9 +228,9 @@ public static void main(String[] args) throws Exception{ Map<String, String> propertiesMap = new HashMap<String, String>(); - propertiesMap.put("propertyToDescribe", "http://dbpedia.org/ontology/league"); - propertiesMap.put("maxExecutionTimeInSeconds", "20"); - propertiesMap.put("maxFetchedRows", "5000"); + propertiesMap.put("propertyToDescribe", "http://dbpedia.org/ontology/writer"); + propertiesMap.put("maxExecutionTimeInSeconds", "10"); + propertiesMap.put("maxFetchedRows", "15000"); PropertyDomainAxiomLearner l = new PropertyDomainAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia())); @@ -246,7 +245,6 @@ f.set(l, editor.getValue()); } } - ConfigHelper.configure(l, "propertyToDescribe", "test"); l.init(); l.start(); System.out.println(l.getCurrentlyBestEvaluatedAxioms(3)); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyRangeAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyRangeAxiomLearner.java 2011-08-04 08:35:37 UTC (rev 2984) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/PropertyRangeAxiomLearner.java 2011-08-04 08:38:23 UTC (rev 2985) @@ -1,42 +1,127 @@ package org.dllearner.algorithms.properties; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; import org.dllearner.core.AxiomLearningAlgorithm; import org.dllearner.core.Component; import org.dllearner.core.ComponentInitException; import org.dllearner.core.EvaluatedAxiom; +import org.dllearner.core.config.ConfigOption; +import 
org.dllearner.core.config.IntegerEditor; +import org.dllearner.core.config.ObjectPropertyEditor; import org.dllearner.core.configurators.Configurator; import org.dllearner.core.owl.Axiom; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.core.owl.ObjectPropertyRangeAxiom; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.learningproblems.AxiomScore; +import org.dllearner.reasoning.SPARQLReasoner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; + public class PropertyRangeAxiomLearner extends Component implements AxiomLearningAlgorithm { - private String propertyToDescribe; +private static final Logger logger = LoggerFactory.getLogger(PropertyRangeAxiomLearner.class); - public String getPropertyToDescribe() { + @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) + private ObjectProperty propertyToDescribe; + @ConfigOption(name="maxExecutionTimeInSeconds", description="", propertyEditorClass=IntegerEditor.class) + private int maxExecutionTimeInSeconds = 10; + @ConfigOption(name="maxFetchedRows", description="The maximum number of rows fetched from the endpoint to approximate the result.", propertyEditorClass=IntegerEditor.class) + private int maxFetchedRows = 0; + + private SPARQLReasoner reasoner; + private SparqlEndpointKS ks; + + private List<EvaluatedAxiom> currentlyBestAxioms; + private long startTime; + private int fetchedRows; + + public PropertyRangeAxiomLearner(SparqlEndpointKS ks){ + this.ks = ks; + } + + public int getMaxExecutionTimeInSeconds() { + return maxExecutionTimeInSeconds; + } + + public void setMaxExecutionTimeInSeconds(int maxExecutionTimeInSeconds) { + this.maxExecutionTimeInSeconds = maxExecutionTimeInSeconds; + } + + public ObjectProperty getPropertyToDescribe() { return propertyToDescribe; } - public void setPropertyToDescribe(String propertyToDescribe) { + public void setPropertyToDescribe(ObjectProperty propertyToDescribe) { this.propertyToDescribe = propertyToDescribe; } + + public int getMaxFetchedRows() { + return maxFetchedRows; + } - public PropertyRangeAxiomLearner(SparqlEndpointKS ks){ - + public void setMaxFetchedRows(int maxFetchedRows) { + this.maxFetchedRows = maxFetchedRows; } - + @Override public void start() { - // TODO Auto-generated method stub - + logger.info("Start learning..."); + startTime = System.currentTimeMillis(); + fetchedRows = 0; + currentlyBestAxioms = new ArrayList<EvaluatedAxiom>(); + //get existing range + Description existingRange = reasoner.getRange(propertyToDescribe); + logger.debug("Existing range: " + existingRange); + + //get objects with types + Map<Individual, Set<NamedClass>> individual2Types = new HashMap<Individual, Set<NamedClass>>(); + while(!terminationCriteriaSatisfied()){ + individual2Types.putAll(getObjectsWithTypes(fetchedRows)); + currentlyBestAxioms = buildBestAxioms(individual2Types); + fetchedRows += 1000; + } + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } @Override public List<Axiom> getCurrentlyBestAxioms(int nrOfAxioms) { - // TODO Auto-generated method stub - return null; + List<Axiom> bestAxioms = new ArrayList<Axiom>(); + + Iterator<EvaluatedAxiom> it = currentlyBestAxioms.iterator(); + 
while(bestAxioms.size() < nrOfAxioms && it.hasNext()){ + bestAxioms.add(it.next().getAxiom()); + } + + return bestAxioms; } + + @Override + public List<EvaluatedAxiom> getCurrentlyBestEvaluatedAxioms(int nrOfAxioms) { + int max = Math.min(currentlyBestAxioms.size(), nrOfAxioms); + + List<EvaluatedAxiom> bestAxioms = currentlyBestAxioms.subList(0, max); + + return bestAxioms; + } @Override public Configurator getConfigurator() { @@ -46,14 +131,96 @@ @Override public void init() throws ComponentInitException { - // TODO Auto-generated method stub + reasoner = new SPARQLReasoner(ks); } + + private boolean terminationCriteriaSatisfied(){ + boolean timeLimitExceeded = maxExecutionTimeInSeconds == 0 ? false : (System.currentTimeMillis() - startTime) >= maxExecutionTimeInSeconds * 1000; + boolean resultLimitExceeded = maxFetchedRows == 0 ? false : fetchedRows >= maxFetchedRows; + return timeLimitExceeded || resultLimitExceeded; + } + + private List<EvaluatedAxiom> buildBestAxioms(Map<Individual, Set<NamedClass>> individual2Types){ + List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); + Map<NamedClass, Integer> result = new HashMap<NamedClass, Integer>(); + for(Entry<Individual, Set<NamedClass>> entry : individual2Types.entrySet()){ + for(NamedClass nc : entry.getValue()){ + Integer cnt = result.get(nc); + if(cnt == null){ + cnt = Integer.valueOf(1); + } + result.put(nc, Integer.valueOf(cnt + 1)); + } + } + + EvaluatedAxiom evalAxiom; + for(Entry<NamedClass, Integer> entry : sortByValues(result)){ + evalAxiom = new EvaluatedAxiom(new ObjectPropertyRangeAxiom(propertyToDescribe, entry.getKey()), + new AxiomScore(entry.getValue() / (double)individual2Types.keySet().size())); + axioms.add(evalAxiom); + } + + return axioms; + } + + /* + * Returns the entries of the map sorted by value. + */ + private SortedSet<Entry<NamedClass, Integer>> sortByValues(Map<NamedClass, Integer> map){ + SortedSet<Entry<NamedClass, Integer>> sortedSet = new TreeSet<Map.Entry<NamedClass,Integer>>(new Comparator<Entry<NamedClass, Integer>>() { - @Override - public List<EvaluatedAxiom> getCurrentlyBestEvaluatedAxioms(int nrOfAxioms) { - // TODO Auto-generated method stub - return null; + @Override + public int compare(Entry<NamedClass, Integer> value1, Entry<NamedClass, Integer> value2) { + if(value1.getValue() < value2.getValue()){ + return 1; + } else if(value2.getValue() < value1.getValue()){ + return -1; + } else { + return value1.getKey().compareTo(value2.getKey()); + } + } + }); + sortedSet.addAll(map.entrySet()); + return sortedSet; } + + private Map<Individual, Set<NamedClass>> getObjectsWithTypes(int offset){ + Map<Individual, Set<NamedClass>> individual2Types = new HashMap<Individual, Set<NamedClass>>(); + int limit = 1000; + String query = String.format("SELECT ?ind ?type WHERE {?s <%s> ?ind. ?ind a ?type.} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); + ResultSet rs = executeQuery(query); + QuerySolution qs; + Individual ind; + Set<NamedClass> types; + while(rs.hasNext()){ + qs = rs.next(); + ind = new Individual(qs.getResource("ind").getURI()); + types = individual2Types.get(ind); + if(types == null){ + types = new HashSet<NamedClass>(); + individual2Types.put(ind, types); + } + types.add(new NamedClass(qs.getResource("type").getURI())); + } + return individual2Types; + } + + /* + * Executes a SELECT query and returns the result. 
+ */ + private ResultSet executeQuery(String query){ + logger.info("Sending query \n {}", query); + + QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); + for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { + queryExecution.addDefaultGraph(dgu); + } + for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { + queryExecution.addNamedGraph(ngu); + } + ResultSet resultSet = queryExecution.execSelect(); + return resultSet; + } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexivePropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexivePropertyAxiomLearner.java 2011-08-04 08:35:37 UTC (rev 2984) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexivePropertyAxiomLearner.java 2011-08-04 08:38:23 UTC (rev 2985) @@ -28,7 +28,7 @@ public class ReflexivePropertyAxiomLearner extends Component implements AxiomLearningAlgorithm { - private static final Logger logger = LoggerFactory.getLogger(TransitivePropertyAxiomLearner.class); + private static final Logger logger = LoggerFactory.getLogger(ReflexivePropertyAxiomLearner.class); @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricPropertyAxiomLearner.java 2011-08-04 08:35:37 UTC (rev 2984) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricPropertyAxiomLearner.java 2011-08-04 08:38:23 UTC (rev 2985) @@ -14,7 +14,6 @@ import org.dllearner.core.configurators.Configurator; import org.dllearner.core.owl.Axiom; import org.dllearner.core.owl.ObjectProperty; -import org.dllearner.core.owl.ReflexiveObjectPropertyAxiom; import org.dllearner.core.owl.SymmetricObjectPropertyAxiom; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.learningproblems.AxiomScore; @@ -29,7 +28,7 @@ public class SymmetricPropertyAxiomLearner extends Component implements AxiomLearningAlgorithm { - private static final Logger logger = LoggerFactory.getLogger(TransitivePropertyAxiomLearner.class); + private static final Logger logger = LoggerFactory.getLogger(SymmetricPropertyAxiomLearner.class); @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; Modified: trunk/components-core/src/main/java/org/dllearner/core/config/ConfigHelper.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/config/ConfigHelper.java 2011-08-04 08:35:37 UTC (rev 2984) +++ trunk/components-core/src/main/java/org/dllearner/core/config/ConfigHelper.java 2011-08-04 08:38:23 UTC (rev 2985) @@ -1,15 +1,32 @@ package org.dllearner.core.config; import java.beans.PropertyEditor; -import java.lang.annotation.Annotation; import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import 
org.dllearner.algorithms.properties.PropertyDomainAxiomLearner; import org.dllearner.core.Component; public class ConfigHelper { + public final static Map<Class<?>, Class<?>> map = new HashMap<Class<?>, Class<?>>(); + + static { + map.put(Boolean.class, boolean.class); + map.put(Byte.class, byte.class); + map.put(Short.class, short.class); + map.put(Character.class, char.class); + map.put(Integer.class, int.class); + map.put(Long.class, long.class); + map.put(Float.class, float.class); + map.put(Double.class, double.class); + } + /** * Configures the given component by setting the value for the appropriate config option. * @param component the component to be configured @@ -25,13 +42,20 @@ try { PropertyEditor editor = (PropertyEditor) option.propertyEditorClass().newInstance(); editor.setAsText(configValue); - f.set(component, editor.getValue()); + Method method = component.getClass().getMethod("set" + Character.toUpperCase(f.getName().charAt(0)) + f.getName().substring(1), getClassForObject(editor.getValue())); + method.invoke(component, editor.getValue()); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); + } catch (InvocationTargetException e) { + e.printStackTrace(); + } catch (SecurityException e) { + e.printStackTrace(); + } catch (NoSuchMethodException e) { + e.printStackTrace(); } } @@ -57,5 +81,19 @@ return options; } + + private static Class<?> getClassForObject(Object obj){ + if(map.containsKey(obj.getClass())){ + return map.get(obj.getClass()); + } else { + return obj.getClass(); + } + } + + public static void main(String[] args) { + PropertyDomainAxiomLearner l = new PropertyDomainAxiomLearner(null); + ConfigHelper.configure(l, "maxExecutionTimeInSeconds", "11"); + System.out.println(l.getMaxExecutionTimeInSeconds()); + } } Added: trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java 2011-08-04 08:38:23 UTC (rev 2985) @@ -0,0 +1,78 @@ +package org.dllearner.test.junit; + +import org.dllearner.algorithms.properties.FunctionalPropertyAxiomLearner; +import org.dllearner.algorithms.properties.PropertyDomainAxiomLearner; +import org.dllearner.algorithms.properties.PropertyRangeAxiomLearner; +import org.dllearner.algorithms.properties.ReflexivePropertyAxiomLearner; +import org.dllearner.algorithms.properties.SymmetricPropertyAxiomLearner; +import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.SparqlEndpoint; + +import junit.framework.TestCase; + +public class PropertyLearningTest extends TestCase{ + + private SparqlEndpointKS ks; + private int maxExecutionTimeInSeconds = 5; + private int nrOfAxioms = 3; + + private ObjectProperty functional = new ObjectProperty("http://dbpedia.org/ontology/league"); + private ObjectProperty reflexive = new ObjectProperty("http://dbpedia.org/ontology/influencedBy"); + private ObjectProperty symmetric = new ObjectProperty("http://dbpedia.org/ontology/influencedBy"); + private ObjectProperty domain = new ObjectProperty("http://dbpedia.org/ontology/writer"); + private ObjectProperty range = new ObjectProperty("http://dbpedia.org/ontology/writer"); + + + @Override + protected 
void setUp() throws Exception { + super.setUp(); + ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia()); + } + + public void testPropertyDomainAxiomLearning() throws Exception { + PropertyDomainAxiomLearner l = new PropertyDomainAxiomLearner(ks); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setPropertyToDescribe(domain); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); + } + + public void testPropertyRangeAxiomLearning() throws Exception { + PropertyRangeAxiomLearner l = new PropertyRangeAxiomLearner(ks); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setPropertyToDescribe(range); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); + } + + public void testReflexivePropertyAxiomLearning() throws Exception { + ReflexivePropertyAxiomLearner l = new ReflexivePropertyAxiomLearner(ks); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setPropertyToDescribe(reflexive); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); + } + + public void testFunctionalPropertyAxiomLearnining() throws Exception { + FunctionalPropertyAxiomLearner l = new FunctionalPropertyAxiomLearner(ks); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setPropertyToDescribe(functional); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); + } + + public void testSymmetricPropertyAxiomLearning() throws Exception { + SymmetricPropertyAxiomLearner l = new SymmetricPropertyAxiomLearner(ks); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setPropertyToDescribe(symmetric); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); + } + +} Property changes on: trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java ___________________________________________________________________ Added: svn:mime-type + text/plain This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
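For readers following the ConfigHelper change in the revision above: the essential idea is that configure() no longer writes the annotated field directly but resolves and invokes the matching JavaBean setter, mapping wrapper classes to primitives so that getMethod() can locate setters with primitive parameters such as setMaxExecutionTimeInSeconds(int). Below is a minimal, self-contained sketch of that pattern; the class, demo bean and option name are illustrative only and are not DL-Learner API.

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

// Sketch only: setter-based configuration via reflection, with wrapper-to-primitive mapping.
public class SetterConfigSketch {

    private static final Map<Class<?>, Class<?>> WRAPPER_TO_PRIMITIVE = new HashMap<Class<?>, Class<?>>();
    static {
        WRAPPER_TO_PRIMITIVE.put(Boolean.class, boolean.class);
        WRAPPER_TO_PRIMITIVE.put(Integer.class, int.class);
        WRAPPER_TO_PRIMITIVE.put(Long.class, long.class);
        WRAPPER_TO_PRIMITIVE.put(Double.class, double.class);
    }

    // Find "set" + capitalized option name and invoke it with the given value.
    public static void configure(Object bean, String optionName, Object value) throws Exception {
        String setterName = "set" + Character.toUpperCase(optionName.charAt(0)) + optionName.substring(1);
        Class<?> paramType = WRAPPER_TO_PRIMITIVE.containsKey(value.getClass())
                ? WRAPPER_TO_PRIMITIVE.get(value.getClass())
                : value.getClass();
        Method setter = bean.getClass().getMethod(setterName, paramType);
        setter.invoke(bean, value);
    }

    // Tiny demo bean standing in for a component with one config option.
    public static class Demo {
        private int maxExecutionTimeInSeconds;
        public void setMaxExecutionTimeInSeconds(int s) { this.maxExecutionTimeInSeconds = s; }
        public int getMaxExecutionTimeInSeconds() { return maxExecutionTimeInSeconds; }
    }

    public static void main(String[] args) throws Exception {
        Demo d = new Demo();
        configure(d, "maxExecutionTimeInSeconds", Integer.valueOf(11));
        System.out.println(d.getMaxExecutionTimeInSeconds()); // prints 11
    }
}

Invoking the setter instead of Field.set means that any logic a component places in its setters still runs when options are applied.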
From: <lor...@us...> - 2011-08-04 12:02:01

Revision: 2992 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=2992&view=rev Author: lorenz_b Date: 2011-08-04 12:01:55 +0000 (Thu, 04 Aug 2011) Log Message: ----------- Continued learning algorithm for subproperty axioms. Added toString method to ObjectPropertyRangeAxiom Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/owl/ObjectPropertyRangeAxiom.java trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java 2011-08-04 11:51:40 UTC (rev 2991) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java 2011-08-04 12:01:55 UTC (rev 2992) @@ -1,27 +1,37 @@ package org.dllearner.algorithms.properties; -import java.beans.PropertyEditor; -import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; import org.dllearner.core.AxiomLearningAlgorithm; import org.dllearner.core.AbstractComponent; +import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; -import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.IntegerEditor; import org.dllearner.core.config.ObjectPropertyEditor; import org.dllearner.core.configurators.Configurator; import org.dllearner.core.owl.Axiom; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.NamedClass; import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.core.owl.ObjectPropertyDomainAxiom; import org.dllearner.kb.SparqlEndpointKS; -import org.dllearner.kb.sparql.SparqlEndpoint; +import org.dllearner.learningproblems.AxiomScore; import org.dllearner.reasoning.SPARQLReasoner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; @@ -30,38 +40,111 @@ @ComponentAnn(name="subPropertyOf learner") public class SubPropertyOfAxiomLearner extends AbstractComponent implements AxiomLearningAlgorithm { +private static final Logger logger = LoggerFactory.getLogger(PropertyDomainAxiomLearner.class); + @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; @ConfigOption(name="maxExecutionTimeInSeconds", description="", propertyEditorClass=IntegerEditor.class) - private int maxExecutionTimeInSeconds; + private int maxExecutionTimeInSeconds = 10; + @ConfigOption(name="maxFetchedRows", description="The maximum number of rows fetched from the endpoint to approximate the result.", propertyEditorClass=IntegerEditor.class) + private int maxFetchedRows = 0; private SPARQLReasoner reasoner; private SparqlEndpointKS ks; + private List<EvaluatedAxiom> currentlyBestAxioms; + private long startTime; + private int fetchedRows; + public 
SubPropertyOfAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; } + public int getMaxExecutionTimeInSeconds() { + return maxExecutionTimeInSeconds; + } + + public void setMaxExecutionTimeInSeconds(int maxExecutionTimeInSeconds) { + this.maxExecutionTimeInSeconds = maxExecutionTimeInSeconds; + } + + public ObjectProperty getPropertyToDescribe() { + return propertyToDescribe; + } + + public void setPropertyToDescribe(ObjectProperty propertyToDescribe) { + this.propertyToDescribe = propertyToDescribe; + } + + public int getMaxFetchedRows() { + return maxFetchedRows; + } + + public void setMaxFetchedRows(int maxFetchedRows) { + this.maxFetchedRows = maxFetchedRows; + } + @Override public void start() { - //get - Set<ObjectProperty> properties = new HashSet<ObjectProperty>(); - String query = String.format("SELECT ?p ?p1 WHERE {?s %s ?o. ?s ?p ?o1. ?s1 ?p1 ?o.}", inAngleBrackets(propertyToDescribe.getURI().toString())); - ResultSet rs = executeQuery(query); - QuerySolution qs; - while(rs.hasNext()){ - qs = rs.next(); - properties.add(new ObjectProperty(qs.getResource("p").getURI())); - properties.add(new ObjectProperty(qs.getResource("p1").getURI())); + logger.info("Start learning..."); + startTime = System.currentTimeMillis(); + fetchedRows = 0; + currentlyBestAxioms = new ArrayList<EvaluatedAxiom>(); + //get existing super properties + SortedSet<ObjectProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); + logger.debug("Existing super properties: " + existingSuperProperties); + + //get subjects with types + int limit = 1000; + int offset = 0; + String queryTemplate = "SELECT ?p (COUNT(?s)) AS ?count WHERE {?s ?p ?o." + + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + + "}"; + String query; + Map<ObjectProperty, Integer> result = new HashMap<ObjectProperty, Integer>(); + ObjectProperty prop; + Integer oldCnt; + while(!terminationCriteriaSatisfied()){ + query = String.format(queryTemplate, propertyToDescribe, limit, offset); + ResultSet rs = executeQuery(query); + QuerySolution qs; + while(rs.hasNext()){ + qs = rs.next(); + prop = new ObjectProperty(qs.getResource("p").getURI()); + int newCnt = qs.getLiteral("count").getInt(); + oldCnt = result.get(prop); + if(oldCnt == null){ + oldCnt = Integer.valueOf(newCnt); + } + result.put(prop, oldCnt); + qs.getLiteral("count").getInt(); + } + offset += 1000; } - System.out.println(properties); + + logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } @Override public List<Axiom> getCurrentlyBestAxioms(int nrOfAxioms) { - // TODO Auto-generated method stub - return null; + List<Axiom> bestAxioms = new ArrayList<Axiom>(); + + Iterator<EvaluatedAxiom> it = currentlyBestAxioms.iterator(); + while(bestAxioms.size() < nrOfAxioms && it.hasNext()){ + bestAxioms.add(it.next().getAxiom()); + } + + return bestAxioms; } + + @Override + public List<EvaluatedAxiom> getCurrentlyBestEvaluatedAxioms(int nrOfAxioms) { + int max = Math.min(currentlyBestAxioms.size(), nrOfAxioms); + + List<EvaluatedAxiom> bestAxioms = currentlyBestAxioms.subList(0, max); + + return bestAxioms; + } @Override public Configurator getConfigurator() { @@ -72,30 +155,65 @@ @Override public void init() throws ComponentInitException { reasoner = new SPARQLReasoner(ks); + } - public int getMaxExecutionTimeInSeconds() { - return maxExecutionTimeInSeconds; + private boolean terminationCriteriaSatisfied(){ + boolean timeLimitExceeded = maxExecutionTimeInSeconds == 0 ? 
false : (System.currentTimeMillis() - startTime) >= maxExecutionTimeInSeconds * 1000; + boolean resultLimitExceeded = maxFetchedRows == 0 ? false : fetchedRows >= maxFetchedRows; + return timeLimitExceeded || resultLimitExceeded; } - - public void setMaxExecutionTimeInSeconds(int maxExecutionTimeInSeconds) { - this.maxExecutionTimeInSeconds = maxExecutionTimeInSeconds; + + private List<EvaluatedAxiom> buildBestAxioms(Map<Individual, Set<NamedClass>> individual2Types){ + List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); + Map<NamedClass, Integer> result = new HashMap<NamedClass, Integer>(); + for(Entry<Individual, Set<NamedClass>> entry : individual2Types.entrySet()){ + for(NamedClass nc : entry.getValue()){ + Integer cnt = result.get(nc); + if(cnt == null){ + cnt = Integer.valueOf(1); + } + result.put(nc, Integer.valueOf(cnt + 1)); + } + } + + EvaluatedAxiom evalAxiom; + for(Entry<NamedClass, Integer> entry : sortByValues(result)){ + evalAxiom = new EvaluatedAxiom(new ObjectPropertyDomainAxiom(propertyToDescribe, entry.getKey()), + new AxiomScore(entry.getValue() / (double)individual2Types.keySet().size())); + axioms.add(evalAxiom); + } + + return axioms; } + + /* + * Returns the entries of the map sorted by value. + */ + private SortedSet<Entry<NamedClass, Integer>> sortByValues(Map<NamedClass, Integer> map){ + SortedSet<Entry<NamedClass, Integer>> sortedSet = new TreeSet<Map.Entry<NamedClass,Integer>>(new Comparator<Entry<NamedClass, Integer>>() { - public ObjectProperty getPropertyToDescribe() { - return propertyToDescribe; + @Override + public int compare(Entry<NamedClass, Integer> value1, Entry<NamedClass, Integer> value2) { + if(value1.getValue() < value2.getValue()){ + return 1; + } else if(value2.getValue() < value1.getValue()){ + return -1; + } else { + return value1.getKey().compareTo(value2.getKey()); + } + } + }); + sortedSet.addAll(map.entrySet()); + return sortedSet; } - - public void setPropertyToDescribe(ObjectProperty propertyToDescribe) { - this.propertyToDescribe = propertyToDescribe; - } - private String inAngleBrackets(String s){ - return "<" + s + ">"; - } - + /* + * Executes a SELECT query and returns the result. 
+ */ private ResultSet executeQuery(String query){ - System.out.println(query); + logger.info("Sending query \n {}", query); + QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { queryExecution.addDefaultGraph(dgu); @@ -103,37 +221,8 @@ for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { queryExecution.addNamedGraph(ngu); } - ResultSet resultset = queryExecution.execSelect(); - return resultset; + ResultSet resultSet = queryExecution.execSelect(); + return resultSet; } - public static void main(String[] args) throws Exception{ - Map<String, String> propertiesMap = new HashMap<String, String>(); - propertiesMap.put("propertyToDescribe", "http://dbpedia.org/ontology/league"); - propertiesMap.put("maxExecutionTimeInSeconds", "20"); - - SubPropertyOfAxiomLearner l = new SubPropertyOfAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); - - Field[] fields = l.getClass().getDeclaredFields(); - for(Field f : fields){ - ConfigOption option = f.getAnnotation(ConfigOption.class); - if(option != null){ - String configValue = propertiesMap.get(option.name()); - PropertyEditor editor = (PropertyEditor) option.propertyEditorClass().newInstance(); - editor.setAsText(configValue); - f.set(l, editor.getValue()); - } - } - - l.init(); - l.start(); - - } - - @Override - public List<EvaluatedAxiom> getCurrentlyBestEvaluatedAxioms(int nrOfAxioms) { - // TODO Auto-generated method stub - return null; - } - } Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/ObjectPropertyRangeAxiom.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/ObjectPropertyRangeAxiom.java 2011-08-04 11:51:40 UTC (rev 2991) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/ObjectPropertyRangeAxiom.java 2011-08-04 12:01:55 UTC (rev 2992) @@ -48,8 +48,7 @@ * @see org.dllearner.core.owl.KBElement#toString(java.lang.String, java.util.Map) */ public String toString(String baseURI, Map<String, String> prefixes) { - // TODO Auto-generated method stub - return null; + return "Domain(" + getProperty() + ", " + getRange() + ")"; } public String toKBSyntaxString(String baseURI, Map<String, String> prefixes) { Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java 2011-08-04 11:51:40 UTC (rev 2991) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java 2011-08-04 12:01:55 UTC (rev 2992) @@ -4,6 +4,7 @@ import org.dllearner.algorithms.properties.PropertyDomainAxiomLearner; import org.dllearner.algorithms.properties.PropertyRangeAxiomLearner; import org.dllearner.algorithms.properties.ReflexivePropertyAxiomLearner; +import org.dllearner.algorithms.properties.SubPropertyOfAxiomLearner; import org.dllearner.algorithms.properties.SymmetricPropertyAxiomLearner; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.kb.SparqlEndpointKS; @@ -22,6 +23,7 @@ private ObjectProperty symmetric = new ObjectProperty("http://dbpedia.org/ontology/influencedBy"); private ObjectProperty domain = new ObjectProperty("http://dbpedia.org/ontology/writer"); private ObjectProperty range = new ObjectProperty("http://dbpedia.org/ontology/writer"); + private ObjectProperty 
subProperty = new ObjectProperty("http://dbpedia.org/ontology/author"); @Override @@ -30,6 +32,15 @@ ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia()); } + public void testSubPropertyOfAxiomLearning() throws Exception { + SubPropertyOfAxiomLearner l = new SubPropertyOfAxiomLearner(ks); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setPropertyToDescribe(subProperty); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); + } + public void testPropertyDomainAxiomLearning() throws Exception { PropertyDomainAxiomLearner l = new PropertyDomainAxiomLearner(ks); l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
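A compact sketch of the pagination-and-counting scheme used in SubPropertyOfAxiomLearner.start() above (illustrative only, not the committed code): the query template is filled per page with LIMIT/OFFSET over the property's subject/object pairs, and the per-candidate-property counts returned for each page are merged into a single map. The query below is written in standard SPARQL 1.1 form with an explicit GROUP BY; the committed template relies on the endpoint accepting the aggregate without one.

import java.util.HashMap;
import java.util.Map;

// Sketch only: build one page of the counting query and accumulate counts across pages.
public class SubPropertyCountSketch {

    static String buildQuery(String propertyURI, int limit, int offset) {
        return String.format(
            "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o. "
          + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}} GROUP BY ?p",
            propertyURI, limit, offset);
    }

    // Merge the count reported for one page into the running total for that property.
    static void merge(Map<String, Integer> counts, String propertyURI, int pageCount) {
        Integer old = counts.get(propertyURI);
        counts.put(propertyURI, old == null ? pageCount : old + pageCount);
    }

    public static void main(String[] args) {
        System.out.println(buildQuery("http://dbpedia.org/ontology/author", 1000, 0));

        Map<String, Integer> counts = new HashMap<String, Integer>();
        merge(counts, "http://dbpedia.org/ontology/writer", 120); // hypothetical page results
        merge(counts, "http://dbpedia.org/ontology/writer", 80);
        System.out.println(counts); // {http://dbpedia.org/ontology/writer=200}
    }
}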
From: <lor...@us...> - 2011-08-05 13:33:48
Revision: 3008 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3008&view=rev Author: lorenz_b Date: 2011-08-05 13:33:42 +0000 (Fri, 05 Aug 2011) Log Message: ----------- Small fixes in query because otherwise jena returns different results compared to in the web interface. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentPropertyAxiomLearner.java 2011-08-05 13:29:14 UTC (rev 3007) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentPropertyAxiomLearner.java 2011-08-05 13:33:42 UTC (rev 3008) @@ -23,6 +23,7 @@ import org.dllearner.core.owl.EquivalentObjectPropertiesAxiom; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.SparqlQuery; import org.dllearner.learningproblems.AxiomScore; import org.dllearner.reasoning.SPARQLReasoner; import org.slf4j.Logger; @@ -92,7 +93,7 @@ //get subjects with types int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p (COUNT(?s)) AS ?count WHERE {?s ?p ?o." + + String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java 2011-08-05 13:29:14 UTC (rev 3007) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubPropertyOfAxiomLearner.java 2011-08-05 13:33:42 UTC (rev 3008) @@ -98,7 +98,7 @@ //get subjects with types int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p (COUNT(?s)) AS ?count WHERE {?s ?p ?o." + + String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." 
+ "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java 2011-08-05 13:29:14 UTC (rev 3007) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/PropertyLearningTest.java 2011-08-05 13:33:42 UTC (rev 3008) @@ -1,5 +1,6 @@ package org.dllearner.test.junit; +import org.dllearner.algorithms.properties.EquivalentPropertyAxiomLearner; import org.dllearner.algorithms.properties.FunctionalPropertyAxiomLearner; import org.dllearner.algorithms.properties.PropertyDomainAxiomLearner; import org.dllearner.algorithms.properties.PropertyRangeAxiomLearner; @@ -15,7 +16,7 @@ public class PropertyLearningTest extends TestCase{ private SparqlEndpointKS ks; - private int maxExecutionTimeInSeconds = 5; + private int maxExecutionTimeInSeconds = 3; private int nrOfAxioms = 3; private ObjectProperty functional = new ObjectProperty("http://dbpedia.org/ontology/league"); @@ -24,6 +25,7 @@ private ObjectProperty domain = new ObjectProperty("http://dbpedia.org/ontology/writer"); private ObjectProperty range = new ObjectProperty("http://dbpedia.org/ontology/writer"); private ObjectProperty subProperty = new ObjectProperty("http://dbpedia.org/ontology/author"); + private ObjectProperty equivProperty = new ObjectProperty("http://dbpedia.org/ontology/academyAward"); @Override @@ -41,6 +43,15 @@ System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); } + public void testEquivalentPropertyOfAxiomLearning() throws Exception { + EquivalentPropertyAxiomLearner l = new EquivalentPropertyAxiomLearner(ks); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setPropertyToDescribe(equivProperty); + l.init(); + l.start(); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(nrOfAxioms)); + } + public void testPropertyDomainAxiomLearning() throws Exception { PropertyDomainAxiomLearner l = new PropertyDomainAxiomLearner(ks); l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-08-23 16:05:53
Revision: 3102 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3102&view=rev Author: jenslehmann Date: 2011-08-23 16:05:46 +0000 (Tue, 23 Aug 2011) Log Message: ----------- weakened dependance on configurators in many components Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/BruteForceLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/RandomGuesser.java trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithmDisjunctive.java trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/gp/GP.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/core/AbstractComponent.java trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/components-core/src/main/java/org/dllearner/learningproblems/ClassLearningProblem.java trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStrict.java trunk/components-core/src/main/java/org/dllearner/learningproblems/PosOnlyLP.java trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java trunk/components-core/src/main/java/org/dllearner/utilities/components/ReasonerComponentFactory.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains_noFuzzyIndividuals.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/core/config/BooleanEditor.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/BruteForceLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/BruteForceLearner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ 
trunk/components-core/src/main/java/org/dllearner/algorithms/BruteForceLearner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -28,7 +28,6 @@ import org.dllearner.core.AbstractCELA; import org.dllearner.core.AbstractLearningProblem; import org.dllearner.core.AbstractReasonerComponent; -import org.dllearner.core.configurators.BruteForceLearnerConfigurator; import org.dllearner.core.options.CommonConfigOptions; import org.dllearner.core.options.ConfigEntry; import org.dllearner.core.options.ConfigOption; @@ -58,11 +57,11 @@ */ public class BruteForceLearner extends AbstractCELA { - private BruteForceLearnerConfigurator configurator; - @Override - public BruteForceLearnerConfigurator getConfigurator(){ - return configurator; - } +// private BruteForceLearnerConfigurator configurator; +// @Override +// public BruteForceLearnerConfigurator getConfigurator(){ +// return configurator; +// } private AbstractLearningProblem learningProblem; @@ -85,7 +84,7 @@ super(learningProblem, rs); this.learningProblem = learningProblem; this.rs = rs; - this.configurator = new BruteForceLearnerConfigurator(this); +// this.configurator = new BruteForceLearnerConfigurator(this); } public static String getName() { Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/RandomGuesser.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/RandomGuesser.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/RandomGuesser.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -50,11 +50,11 @@ */ public class RandomGuesser extends AbstractCELA { - private RandomGuesserConfigurator configurator; - @Override - public RandomGuesserConfigurator getConfigurator(){ - return configurator; - } +// private RandomGuesserConfigurator configurator; +// @Override +// public RandomGuesserConfigurator getConfigurator(){ +// return configurator; +// } private Description bestDefinition = null; private Score bestScore; @@ -69,7 +69,7 @@ public RandomGuesser(AbstractLearningProblem learningProblem, AbstractReasonerComponent rs) { super(learningProblem, rs); - this.configurator = new RandomGuesserConfigurator(this); +// this.configurator = new RandomGuesserConfigurator(this); } public static String getName() { Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -144,7 +144,6 @@ private int minHorizExp = 0; private int maxHorizExp = 0; - @Override public CELOEConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithm.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithm.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -30,10 +30,11 @@ import org.dllearner.core.AbstractCELA; import org.dllearner.core.AbstractLearningProblem; import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.config.BooleanEditor; +import 
org.dllearner.core.config.ConfigOption; import org.dllearner.core.configurators.Configurator; import org.dllearner.core.configurators.ELLearningAlgorithmConfigurator; import org.dllearner.core.options.CommonConfigOptions; -import org.dllearner.core.options.ConfigOption; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Thing; import org.dllearner.learningproblems.EvaluatedDescriptionPosNeg; @@ -54,7 +55,7 @@ public class ELLearningAlgorithm extends AbstractCELA { private static Logger logger = Logger.getLogger(ELLearningAlgorithm.class); - private ELLearningAlgorithmConfigurator configurator; +// private ELLearningAlgorithmConfigurator configurator; private ELDown2 operator; @@ -63,7 +64,11 @@ private double treeSearchTimeSeconds = 1.0; private long treeStartTime; + // "instanceBasedDisjoints", "Specifies whether to use real disjointness checks or instance based ones (no common instances) in the refinement operator." + @ConfigOption(name="instanceBasedDisjoints", required=false, defaultValue="true", description="Specifies whether to use real disjointness checks or instance based ones (no common instances) in the refinement operator.", propertyEditorClass=BooleanEditor.class) + private boolean instanceBasedDisjoints = true; + // a set with limited size (currently the ordering is defined in the class itself) private EvaluatedDescriptionSet bestEvaluatedDescriptions = new EvaluatedDescriptionSet(AbstractCELA.MAX_NR_OF_RESULTS); @@ -73,7 +78,7 @@ public ELLearningAlgorithm(PosNegLP problem, AbstractReasonerComponent reasoner) { super(problem, reasoner); - configurator = new ELLearningAlgorithmConfigurator(this); +// configurator = new ELLearningAlgorithmConfigurator(this); } public static String getName() { @@ -91,18 +96,18 @@ return (PosNegLP) learningProblem; } - @Override - public ELLearningAlgorithmConfigurator getConfigurator() { - return configurator; - } +// @Override +// public ELLearningAlgorithmConfigurator getConfigurator() { +// return configurator; +// } - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); -// options.add(CommonConfigOptions.getNoisePercentage()); -// options.add(new StringConfigOption("startClass", "the named class which should be used to start the algorithm (GUI: needs a widget for selecting a class)")); - options.add(CommonConfigOptions.getInstanceBasedDisjoints()); - return options; - } +// public static Collection<ConfigOption<?>> createConfigOptions() { +// Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); +//// options.add(CommonConfigOptions.getNoisePercentage()); +//// options.add(new StringConfigOption("startClass", "the named class which should be used to start the algorithm (GUI: needs a widget for selecting a class)")); +// options.add(CommonConfigOptions.getInstanceBasedDisjoints()); +// return options; +// } @Override public void init() throws ComponentInitException { @@ -110,7 +115,7 @@ heuristic = new StableHeuristic(); candidates = new TreeSet<SearchTreeNode>(heuristic); - operator = new ELDown2(reasoner, configurator.getInstanceBasedDisjoints()); + operator = new ELDown2(reasoner, instanceBasedDisjoints); } @Override Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithmDisjunctive.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithmDisjunctive.java 2011-08-23 
14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/el/ELLearningAlgorithmDisjunctive.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -150,7 +150,6 @@ return (PosNegLP) learningProblem; } - @Override public Configurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -154,7 +154,6 @@ // private PrintWriter out; // private long start = 0; - @Override public FuzzyCELOEConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/gp/GP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/gp/GP.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/gp/GP.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -56,11 +56,11 @@ */ public class GP extends AbstractCELA { - private GPConfigurator configurator; - @Override - public GPConfigurator getConfigurator(){ - return configurator; - } +// private GPConfigurator configurator; +// @Override +// public GPConfigurator getConfigurator(){ +// return configurator; +// } // NumberFormat f; @@ -141,7 +141,7 @@ */ public GP(PosNegLP learningProblem, AbstractReasonerComponent rs) { super(learningProblem, rs); - this.configurator = new GPConfigurator(this); +// this.configurator = new GPConfigurator(this); } public static String getName() { Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -132,7 +132,6 @@ private int minHorizExp = 0; private int maxHorizExp = 0; - @Override public ISLEConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -83,7 +83,7 @@ public class OCEL extends AbstractCELA { private OCELConfigurator configurator; - @Override + public OCELConfigurator getConfigurator(){ return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -64,7 +64,7 @@ public class ROLearner extends AbstractCELA { private ROLearnerConfigurator configurator; - @Override + public ROLearnerConfigurator getConfigurator(){ return configurator; } Modified: 
trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -82,7 +82,6 @@ return returnList; } - @Override public Configurator getConfigurator() { // TODO Auto-generated method stub return null; Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractComponent.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractComponent.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractComponent.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -36,7 +36,7 @@ */ public abstract class AbstractComponent implements Component { - protected Configurator configurator; +// protected Configurator configurator; /** * For each component, a configurator class is generated in package @@ -46,7 +46,7 @@ * a component. * @return An object allowing to configure this component. */ - public abstract Configurator getConfigurator(); +// public abstract Configurator getConfigurator(); /** * Returns the name of this component. By default, "unnamed Modified: trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -53,7 +53,9 @@ // components must be listed here if they should be supported in interfaces // (CLI, GUI, Web Service) and scripts (HTML documentation generator) private static List<String> componentClassNames = new ArrayList<String> ( Arrays.asList(new String[]{ - "org.dllearner.algorithms.celoe.CELOE", + "org.dllearner.algorithms.celoe.CELOE", + "org.dllearner.algorithms.BruteForceLearner", + "org.dllearner.algorithms.RandomGuesser", "org.dllearner.algorithms.properties.DisjointObjectPropertyAxiomLearner", "org.dllearner.algorithms.properties.EquivalentObjectPropertyAxiomLearner", "org.dllearner.algorithms.properties.FunctionalObjectPropertyAxiomLearner", Added: trunk/components-core/src/main/java/org/dllearner/core/config/BooleanEditor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/config/BooleanEditor.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/core/config/BooleanEditor.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -0,0 +1,83 @@ +package org.dllearner.core.config; + +import java.awt.Component; +import java.awt.Graphics; +import java.awt.Rectangle; +import java.beans.PropertyChangeListener; +import java.beans.PropertyEditor; + +public class BooleanEditor implements PropertyEditor { + + @Override + public void addPropertyChangeListener(PropertyChangeListener listener) { + // TODO Auto-generated method stub + + } + + @Override + public String getAsText() { + // TODO Auto-generated method stub + return null; + } + + @Override + public Component getCustomEditor() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getJavaInitializationString() { + 
// TODO Auto-generated method stub + return null; + } + + @Override + public String[] getTags() { + // TODO Auto-generated method stub + return null; + } + + @Override + public Object getValue() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isPaintable() { + // TODO Auto-generated method stub + return false; + } + + @Override + public void paintValue(Graphics gfx, Rectangle box) { + // TODO Auto-generated method stub + + } + + @Override + public void removePropertyChangeListener(PropertyChangeListener listener) { + // TODO Auto-generated method stub + + } + + @Override + public void setAsText(String text) throws IllegalArgumentException { + // TODO Auto-generated method stub + + } + + @Override + public void setValue(Object value) { + // TODO Auto-generated method stub + + } + + @Override + public boolean supportsCustomEditor() { + // TODO Auto-generated method stub + return false; + } + +} Modified: trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -20,13 +20,12 @@ package org.dllearner.kb; import java.io.File; -import java.io.IOException; import java.net.URI; -import java.net.URL; import java.util.Collection; import java.util.LinkedList; import org.apache.log4j.Logger; +import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.configurators.KBFileConfigurator; @@ -38,7 +37,9 @@ import org.dllearner.parser.KBParser; import org.dllearner.parser.ParseException; import org.dllearner.reasoning.DIGConverter; +import org.springframework.beans.propertyeditors.StringTrimmerEditor; + /** * KB files are an internal convenience format used in DL-Learner. Their * syntax is close to Description Logics and easy to use. KB files can be @@ -47,19 +48,20 @@ * @author Jens Lehmann * */ +@ComponentAnn(name = "KB file", shortName = "kbfile", version = 0.8) public class KBFile extends AbstractKnowledgeSource { private static Logger logger = Logger.getLogger(KBFile.class); private KB kb; - - private KBFileConfigurator configurator; + @org.dllearner.core.config.ConfigOption(name = "url", description = "URL pointer to the KB file", defaultValue = "", required = false, propertyEditorClass = StringTrimmerEditor.class) + private String url; + /** * Default constructor (needed for reflection in ComponentManager). */ public KBFile() { - configurator = new KBFileConfigurator(this); } /** @@ -71,65 +73,29 @@ * @param kb A KB object. */ public KBFile(KB kb) { - configurator = new KBFileConfigurator(this); this.kb = kb; } - @Override - public KBFileConfigurator getConfigurator(){ - return configurator; - } - public static String getName() { return "KB file"; } + @Override + public void init() throws ComponentInitException { + try { + if (getUrl() != null) { + kb = KBParser.parseKBFile(getUrl()); + logger.trace("KB File " + getUrl() + " parsed successfully."); + } else { + throw new ComponentInitException("No URL option or kb object given. 
Cannot initialise KBFile component."); + } - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); -// options.add(new StringConfigOption("filename", "pointer to the KB file on local file system",null, true, true)); - URLConfigOption urlOption = new URLConfigOption("url", "URL pointer to the KB file",null, false, true); - urlOption.setRefersToFile(true); - options.add(urlOption); - return options; - } + } catch (ParseException e) { + throw new ComponentInitException("KB file " + getUrl() + " could not be parsed correctly.", e); + } + } /* - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - - } - - /* (non-Javadoc) - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() throws ComponentInitException { - try { - - // we either need a specified URL (if object is created - // via component manager) or the kb object has been - // passed directly (via constructor) - if(kb == null) { - if(configurator.getUrl() != null) { - kb = KBParser.parseKBFile(configurator.getUrl()); - logger.trace("KB File " + configurator.getUrl() + " parsed successfully."); - } else { - throw new ComponentInitException("No URL option or kb object given. Cannot initialise KBFile component."); - } - } - - } catch (IOException e) { - throw new ComponentInitException("KB file " + configurator.getUrl() + " could not be read.", e); - } catch (ParseException e) { - throw new ComponentInitException("KB file " + configurator.getUrl() + " could not be parsed correctly.", e); - } - - } - - /* * (non-Javadoc) * * @see org.dllearner.core.KnowledgeSource#toDIG() @@ -150,33 +116,19 @@ @Override public void export(File file, org.dllearner.core.OntologyFormat format){ kb.export(file, format); -// OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); -// URI ontologyURI = URI.create("http://example.com"); -// URI physicalURI = file.toURI(); -// SimpleURIMapper mapper = new SimpleURIMapper(ontologyURI, physicalURI); -// manager.addURIMapper(mapper); -// OWLOntology ontology; -// try { -// ontology = manager.createOntology(ontologyURI); -// // OWLAPIReasoner.fillOWLAPIOntology(manager,ontology,kb); -// OWLAPIAxiomConvertVisitor.fillOWLOntology(manager, ontology, kb); -// manager.saveOntology(ontology); -// } catch (OWLOntologyCreationException e) { -// e.printStackTrace(); -// } catch (UnknownOWLOntologyException e) { -// e.printStackTrace(); -// } catch (OWLOntologyStorageException e) { -// e.printStackTrace(); -// } } - public URL getURL() { - return configurator.getUrl(); + public String getUrl() { + return url; } @Override public KB toKB() { return kb; } - + + + public void setUrl(String url) { + this.url = url; + } } Modified: trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -39,7 +39,7 @@ public class OWLAPIOntology extends AbstractKnowledgeSource { private OWLAPIOntologyConfigurator configurator; - @Override + public OWLAPIOntologyConfigurator getConfigurator(){ return configurator; } Modified: 
trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -47,20 +47,23 @@ private static Logger logger = Logger .getLogger(OWLFile.class); + // TODO: turn this into a config option + private URL url; + // private URL url; - private OWLFileConfigurator configurator ; - @Override - public OWLFileConfigurator getConfigurator(){ - return configurator; - } +// private OWLFileConfigurator configurator ; +// @Override +// public OWLFileConfigurator getConfigurator(){ +// return configurator; +// } public static String getName() { return "OWL file"; } - public OWLFile(){ - configurator = new OWLFileConfigurator(this); - } +// public OWLFile(){ +// configurator = new OWLFileConfigurator(this); +// } public static Collection<ConfigOption<?>> createConfigOptions() { @@ -84,7 +87,7 @@ */ @Override public void init() throws ComponentInitException { - if(configurator.getUrl() == null) { + if(url == null) { logger.error("Cannot initialise OWL file with empty URL"); } @@ -107,15 +110,16 @@ @Override public String toDIG(URI kbURI) { // TODO: need some handling for cases where the URL was not set - return OWLAPIDIGConverter.getTellsString(configurator.getUrl(), OntologyFormat.RDF_XML, kbURI); + return OWLAPIDIGConverter.getTellsString(url, OntologyFormat.RDF_XML, kbURI); } public URL getURL() { - return configurator.getUrl(); +// return configurator.getUrl(); + return url; } public void setURL(URL url) { -// this.url = url; - configurator.setUrl(url); + this.url = url; +// configurator.setUrl(url); } /* (non-Javadoc) Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -84,7 +84,6 @@ /** * @return the configurator for this Knowledgesource */ - @Override public SparqlKnowledgeSourceConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/ClassLearningProblem.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/ClassLearningProblem.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/ClassLearningProblem.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -85,7 +85,6 @@ private HeuristicType heuristic = HeuristicType.AMEASURE; - @Override public ClassLearningProblemConfigurator getConfigurator(){ return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStandard.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -65,7 +65,6 @@ private HeuristicType heuristic = HeuristicType.PRED_ACC; - @Override public 
PosNegLPStandardConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStrict.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStrict.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/PosNegLPStrict.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -53,7 +53,7 @@ private double errorPenalty = defaultErrorPenalty; private PosNegLPStrictConfigurator configurator; - @Override + public PosNegLPStrictConfigurator getConfigurator(){ return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/PosOnlyLP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/PosOnlyLP.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/PosOnlyLP.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -58,7 +58,6 @@ // private PosNegLPStandard definitionLP; private PosOnlyLPConfigurator configurator; - @Override public PosOnlyLPConfigurator getConfigurator(){ return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/learningproblems/fuzzydll/FuzzyPosNegLPStandard.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -76,7 +76,6 @@ private int errorIndex = 0; - @Override public FuzzyPosNegLPStandardConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -78,7 +78,7 @@ public class DIGReasoner extends AbstractReasonerComponent { private DIGReasonerConfigurator configurator; - @Override + public DIGReasonerConfigurator getConfigurator(){ return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -104,7 +104,6 @@ private FastInstanceCheckerConfigurator configurator; - @Override public FastInstanceCheckerConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -51,7 +51,7 @@ public class FastRetrievalReasoner extends 
AbstractReasonerComponent { private FastRetrievalReasonerConfigurator configurator; - @Override + public FastRetrievalReasonerConfigurator getConfigurator(){ return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -66,7 +66,6 @@ //private String reasonerType = "pellet"; private OWLAPIReasonerConfigurator configurator; - @Override public OWLAPIReasonerConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -423,7 +423,6 @@ classifier.dispose(); } - @Override public PelletReasonerConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -195,7 +195,6 @@ reasoner.dispose(); } - @Override public ProtegeReasonerConfigurator getConfigurator() { return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -139,7 +139,7 @@ // private String reasonerType = "pellet"; private FuzzyOWLAPIReasonerConfigurator configurator; - @Override + public FuzzyOWLAPIReasonerConfigurator getConfigurator(){ return configurator; } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/components/ReasonerComponentFactory.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/components/ReasonerComponentFactory.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/main/java/org/dllearner/utilities/components/ReasonerComponentFactory.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -54,7 +54,7 @@ // knowledge source OWLFile ks = cm.knowledgeSource(OWLFile.class); URL fileURL = new File(ontologyFile).toURI().toURL(); - ks.getConfigurator().setUrl(fileURL); + ks.setURL(fileURL); ks.init(); // reasoner component Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java 2011-08-23 16:05:46 UTC (rev 3102) 
@@ -101,7 +101,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/foodItems_v1.owl")); + ks.setURL(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/foodItems_v1.owl")); ks.init(); //ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -126,7 +126,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file", null, "../examples/fuzzydll/fuzzyTrains_v5.0.owl")); + ks.setURL(new URL("file", null, "../examples/fuzzydll/fuzzyTrains_v5.0.owl")); ks.init(); // ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains_noFuzzyIndividuals.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains_noFuzzyIndividuals.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains_noFuzzyIndividuals.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -99,7 +99,7 @@ ComponentManager cm = ComponentManager.getInstance(); OWLFile ks = cm.knowledgeSource(OWLFile.class); - ks.getConfigurator().setUrl(new URL("file", null, "../examples/fuzzydll/fuzzyTrains_v2.1a.owl")); + ks.setURL(new URL("file", null, "../examples/fuzzydll/fuzzyTrains_v2.1a.owl")); ks.init(); // ReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); Modified: trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java 2011-08-23 14:48:02 UTC (rev 3101) +++ trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_noFuzzyTrains.java 2011-08-23 16:05:46 UTC (rev 3102) @@ -110,7 +110,7 @@ OWLFile ks = cm.knowledgeSource(OWLFile.class); // ks.getConfigurator().setUrl(new URL("file:///Users/josue/Documents/PhD/AKSW/ontologies/fuzzyTrains/fuzzyTrains_v1.0.owl")); - ks.getConfigurator().setUrl(new URL("file", null, "../examples/fuzzydll/noFuzzyTrains_v1.5.owl")); + ks.setURL(new URL("file", null, "../examples/fuzzydll/noFuzzyTrains_v1.5.owl")); ks.init(); AbstractReasonerComponent rc = cm.reasoner(OWLAPIReasoner.class, ks); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
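The revision above replaces the generated Configurator classes with annotation-driven config options plus ordinary getters and setters. A self-contained sketch of that pattern follows; the annotation and component defined here are stand-ins written for illustration only, not the actual org.dllearner.core.config classes.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;

// Sketch only: options are annotated fields with plain accessors, discoverable by reflection.
public class AnnotatedOptionSketch {

    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    @interface Option {
        String name();
        String defaultValue() default "";
    }

    static class DemoComponent {
        @Option(name = "instanceBasedDisjoints", defaultValue = "true")
        private boolean instanceBasedDisjoints = true;

        public boolean isInstanceBasedDisjoints() { return instanceBasedDisjoints; }
        public void setInstanceBasedDisjoints(boolean b) { this.instanceBasedDisjoints = b; }
    }

    public static void main(String[] args) {
        // Enumerate the options a component declares, as a ConfigHelper-style utility might.
        for (Field f : DemoComponent.class.getDeclaredFields()) {
            Option o = f.getAnnotation(Option.class);
            if (o != null) {
                System.out.println(o.name() + " (default: " + o.defaultValue() + ")");
            }
        }
        DemoComponent c = new DemoComponent();
        c.setInstanceBasedDisjoints(false); // configured via the setter, no Configurator involved
        System.out.println(c.isInstanceBasedDisjoints());
    }
}

With this arrangement, interfaces such as the CLI, GUI or documentation generator can discover a component's options reflectively from the annotations, while programmatic callers simply use the setters.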
From: <jen...@us...> - 2011-08-30 15:17:11
|
Revision: 3171 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3171&view=rev Author: jenslehmann Date: 2011-08-30 15:17:02 +0000 (Tue, 30 Aug 2011) Log Message: ----------- moved fuzzy CELOE to new architecture Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyOEHeuristicRuntime.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains.java trunk/components-core/src/test/java/org/dllearner/test/FuzzyDLLTest_Trains_noFuzzyIndividuals.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/refinementoperators/FuzzyRhoDRDown.java Removed Paths: ------------- trunk/components-core/src/main/java/org/dllearner/refinementoperators/fuzzydll/ Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-08-30 12:13:54 UTC (rev 3170) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-08-30 15:17:02 UTC (rev 3171) @@ -35,6 +35,7 @@ import java.util.TreeSet; import org.apache.log4j.Logger; +import org.dllearner.algorithms.celoe.OEHeuristicRuntime; import org.dllearner.core.AbstractCELA; import org.dllearner.core.ComponentInitException; import org.dllearner.core.EvaluatedDescription; @@ -60,10 +61,10 @@ import org.dllearner.learningproblems.PosOnlyLP; import org.dllearner.learningproblems.fuzzydll.FuzzyPosNegLP; import org.dllearner.learningproblems.fuzzydll.FuzzyPosNegLPStandard; +import org.dllearner.refinementoperators.FuzzyRhoDRDown; import org.dllearner.refinementoperators.OperatorInverter; import org.dllearner.refinementoperators.RefinementOperator; import org.dllearner.refinementoperators.RhoDRDown; -import org.dllearner.refinementoperators.fuzzydll.FuzzyRhoDRDown; import org.dllearner.utilities.Files; import org.dllearner.utilities.Helper; import org.dllearner.utilities.owl.ConceptComparator; @@ -71,6 +72,7 @@ import org.dllearner.utilities.owl.DescriptionMinimizer; import org.dllearner.utilities.owl.EvaluatedDescriptionSet; import org.dllearner.utilities.owl.PropertyContext; +import org.springframework.beans.factory.annotation.Autowired; import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; @@ -86,7 +88,7 @@ public class FuzzyCELOE extends AbstractCELA implements FuzzyClassExpressionLearningAlgorithm { private static Logger logger = Logger.getLogger(FuzzyCELOE.class); - private FuzzyCELOEConfigurator configurator; +// private FuzzyCELOEConfigurator configurator; private boolean isRunning = false; private boolean stop = false; @@ -130,7 +132,6 @@ // important parameters private double noise; - private double maxDepth; private boolean filterFollowsFromKB; // less important parameters @@ -154,13 +155,47 @@ // private PrintWriter out; // private long start = 0; - public FuzzyCELOEConfigurator getConfigurator() { - return configurator; + // TODO: turn those into config options + + // important: do not initialise those with empty sets + // null = no settings for allowance / ignorance + // empty set = allow / ignore nothing (it is often not desired to allow no class!) 
+ Set<NamedClass> allowedConcepts = null; + Set<NamedClass> ignoredConcepts = null; + + private boolean writeSearchTree = false; + + private String searchTreeFile = "log/searchTree.txt"; + + private int maxNrOfResults = 10; + + private double noisePercentage = 0.0; + + private boolean filterDescriptionsFollowingFromKB = false; + + private boolean reuseExistingDescription = false; + + private boolean replaceSearchTree = false; + + private int maxClassDescriptionTests = 0; + + private int maxExecutionTimeInSeconds = 100; + + private boolean terminateOnNoiseReached = false; + + private double maxDepth = 7; + + public FuzzyCELOE() { + } +// public FuzzyCELOEConfigurator getConfigurator() { +// return configurator; +// } + public FuzzyCELOE(AbstractLearningProblem problem, AbstractReasonerComponent reasoner) { super(problem, reasoner); - configurator = new FuzzyCELOEConfigurator(this); +// configurator = new FuzzyCELOEConfigurator(this); } public static Collection<Class<? extends AbstractLearningProblem>> supportedLearningProblems() { @@ -206,68 +241,72 @@ @Override public void init() throws ComponentInitException { - // TODO: remove, just for testing purposes -// FileWriter fstream; -// try { -// fstream = new FileWriter("../examples/fuzzydll/kk.log"); -// out = new PrintWriter(fstream); -// } catch (IOException e) { -// // TODO Auto-generated catch block -// e.printStackTrace(); -// } + // compute used concepts/roles from allowed/ignored + // concepts/roles + Set<NamedClass> usedConcepts; +// Set<NamedClass> allowedConcepts = configurator.getAllowedConcepts()==null ? null : CommonConfigMappings.getAtomicConceptSet(configurator.getAllowedConcepts()); +// Set<NamedClass> ignoredConcepts = configurator.getIgnoredConcepts()==null ? null : CommonConfigMappings.getAtomicConceptSet(configurator.getIgnoredConcepts()); + if(allowedConcepts != null) { + // sanity check to control if no non-existing concepts are in the list + Helper.checkConcepts(reasoner, allowedConcepts); + usedConcepts = allowedConcepts; + } else if(ignoredConcepts != null) { + usedConcepts = Helper.computeConceptsUsingIgnoreList(reasoner, ignoredConcepts); + } else { + usedConcepts = Helper.computeConcepts(reasoner); + } // copy class hierarchy and modify it such that each class is only // reachable via a single path - ClassHierarchy classHierarchy = reasoner.getClassHierarchy().clone(); +// ClassHierarchy classHierarchy = reasoner.getClassHierarchy().clone(); + ClassHierarchy classHierarchy = reasoner.getClassHierarchy().cloneAndRestrict(usedConcepts); classHierarchy.thinOutSubsumptionHierarchy(); + + // if no one injected a heuristic, we use a default one + if(heuristic == null) { + heuristic = new FuzzyOEHeuristicRuntime(); + } - heuristic = new FuzzyOEHeuristicRuntime(configurator); - minimizer = new DescriptionMinimizer(reasoner); startClass = Thing.instance; - singleSuggestionMode = configurator.getSingleSuggestionMode(); +// singleSuggestionMode = configurator.getSingleSuggestionMode(); - // TODO: 1. turn those into instance variables / fields 2. provide getters/setters; - // 3. 
annotate them with @ConfigOption => this all needs to be done in FuzzyRhoDRDown, - // not in this class - boolean useExistsConstructor = true; - int valueFrequencyThreshold = 2; - boolean useCardinalityRestrictions = false; - int cardinalityLimit = 1; - boolean useHasValueConstructor = false; - boolean useNegation = true; - boolean useStringDatatypes = false; - boolean useBooleanDatatypes = false; - boolean useDoubleDatatypes = false; - boolean instanceBasedDisjoints = true; - boolean applyAllFilter = true; - boolean applyExistsFilter = true; - boolean useAllConstructor = true; - // create refinement operator - operator = new FuzzyRhoDRDown(reasoner, classHierarchy, cardinalityLimit, useHasValueConstructor, useStringDatatypes, instanceBasedDisjoints, applyAllFilter, applyExistsFilter, useAllConstructor, - useExistsConstructor, valueFrequencyThreshold, useCardinalityRestrictions, useNegation, useBooleanDatatypes, useDoubleDatatypes, (NamedClass) startClass); + if(operator == null) { + operator = new RhoDRDown(); + ((RhoDRDown)operator).setStartClass(startClass); + ((RhoDRDown)operator).setSubHierarchy(classHierarchy); + ((RhoDRDown)operator).setReasoner(reasoner); + ((RhoDRDown)operator).init(); + } +// operator = new RhoDRDown(reasoner, classHierarchy, startClass, configurator); baseURI = reasoner.getBaseURI(); prefixes = reasoner.getPrefixes(); - if(configurator.getWriteSearchTree()) { - Files.clearFile(new File(configurator.getSearchTreeFile())); + if(writeSearchTree) { + File f = new File(searchTreeFile ); + Files.clearFile(f); } - bestEvaluatedDescriptions = new EvaluatedDescriptionSet(configurator.getMaxNrOfResults()); + bestEvaluatedDescriptions = new EvaluatedDescriptionSet(maxNrOfResults); isClassLearningProblem = (learningProblem instanceof ClassLearningProblem); // we put important parameters in class variables - noise = configurator.getNoisePercentage()/100d; + noise = noisePercentage/100d; // System.out.println("noise " + noise); - maxDepth = configurator.getMaxDepth(); +// maxDepth = configurator.getMaxDepth(); // (filterFollowsFromKB is automatically set to false if the problem // is not a class learning problem - filterFollowsFromKB = configurator.getFilterDescriptionsFollowingFromKB() - && isClassLearningProblem; + filterFollowsFromKB = filterDescriptionsFollowingFromKB && isClassLearningProblem; +// Set<Description> concepts = operator.refine(Thing.instance, 5); +// for(Description concept : concepts) { +// System.out.println(concept); +// } +// System.out.println("refinements of thing: " + concepts.size()); + // actions specific to ontology engineering if(isClassLearningProblem) { ClassLearningProblem problem = (ClassLearningProblem) learningProblem; @@ -281,7 +320,7 @@ // superfluous to add super classes in this case) if(isEquivalenceProblem) { Set<Description> existingDefinitions = reasoner.getAssertedDefinitions(classToDescribe); - if(configurator.getReuseExistingDescription() && (existingDefinitions.size() > 0)) { + if(reuseExistingDescription && (existingDefinitions.size() > 0)) { // the existing definition is reused, which in the simplest case means to // use it as a start class or, if it is already too specific, generalise it @@ -344,7 +383,7 @@ "sensible to learn a description in this case."); } } - } + } } else if(learningProblem instanceof PosOnlyLP) { examples = ((PosOnlyLP)learningProblem).getPositiveExamples(); // changed by Josue @@ -457,7 +496,7 @@ updateMinMaxHorizExp(nextNode); // writing the search tree (if configured) - if 
(configurator.getWriteSearchTree()) { + if (writeSearchTree) { String treeString = "best node: " + bestEvaluatedDescriptions.getBest() + "\n"; if (refinements.size() > 1) { treeString += "all expanded nodes:\n"; @@ -468,10 +507,10 @@ treeString += startNode.toTreeString(baseURI); treeString += "\n"; - if (configurator.getReplaceSearchTree()) - Files.createFile(new File(configurator.getSearchTreeFile()), treeString); + if (replaceSearchTree) + Files.createFile(new File(searchTreeFile), treeString); else - Files.appendFile(new File(configurator.getSearchTreeFile()), treeString); + Files.appendFile(new File(searchTreeFile), treeString); } // System.out.println(loop); @@ -758,9 +797,9 @@ private boolean terminationCriteriaSatisfied() { return stop || - (configurator.getMaxClassDescriptionTests() != 0 && (expressionTests >= configurator.getMaxClassDescriptionTests())) || - (configurator.getMaxExecutionTimeInSeconds() != 0 && ((System.nanoTime() - nanoStartTime) >= (configurator.getMaxExecutionTimeInSeconds()*1000000000l))) || - (configurator.getTerminateOnNoiseReached() && (100*getCurrentlyBestAccuracy()>100-configurator.getNoisePercentage())); + (maxClassDescriptionTests != 0 && (expressionTests >= maxClassDescriptionTests)) || + (maxExecutionTimeInSeconds != 0 && ((System.nanoTime() - nanoStartTime) >= (maxExecutionTimeInSeconds*1000000000l))) || + (terminateOnNoiseReached && (100*getCurrentlyBestAccuracy()>=100-noisePercentage)); } private void reset() { @@ -860,13 +899,6 @@ public int getMinimumHorizontalExpansion() { return minHorizExp; } - - /** - * @return the expressionTests - */ - public int getClassExpressionTests() { - return expressionTests; - } // added by Josue (when implementing FuzzyClassExpressionLearningAlgorithm) @@ -881,5 +913,128 @@ int nrOfDescriptions) { // TODO Auto-generated method stub return null; + } + + + /** + * @return the expressionTests + */ + public int getClassExpressionTests() { + return expressionTests; + } + + public RefinementOperator getOperator() { + return operator; + } + + @Autowired(required=false) + public void setOperator(RefinementOperator operator) { + this.operator = operator; + } + + public Description getStartClass() { + return startClass; + } + + public void setStartClass(Description startClass) { + this.startClass = startClass; + } + + public Set<NamedClass> getAllowedConcepts() { + return allowedConcepts; + } + + public void setAllowedConcepts(Set<NamedClass> allowedConcepts) { + this.allowedConcepts = allowedConcepts; + } + + public Set<NamedClass> getIgnoredConcepts() { + return ignoredConcepts; + } + + public void setIgnoredConcepts(Set<NamedClass> ignoredConcepts) { + this.ignoredConcepts = ignoredConcepts; + } + + public boolean isWriteSearchTree() { + return writeSearchTree; + } + + public void setWriteSearchTree(boolean writeSearchTree) { + this.writeSearchTree = writeSearchTree; + } + + public String getSearchTreeFile() { + return searchTreeFile; + } + + public void setSearchTreeFile(String searchTreeFile) { + this.searchTreeFile = searchTreeFile; + } + + public int getMaxNrOfResults() { + return maxNrOfResults; + } + + public void setMaxNrOfResults(int maxNrOfResults) { + this.maxNrOfResults = maxNrOfResults; + } + + public double getNoisePercentage() { + return noisePercentage; + } + + public void setNoisePercentage(double noisePercentage) { + this.noisePercentage = noisePercentage; + } + + public boolean isFilterDescriptionsFollowingFromKB() { + return filterDescriptionsFollowingFromKB; + } + + public void 
setFilterDescriptionsFollowingFromKB(boolean filterDescriptionsFollowingFromKB) { + this.filterDescriptionsFollowingFromKB = filterDescriptionsFollowingFromKB; + } + + public boolean isReplaceSearchTree() { + return replaceSearchTree; + } + + public void setReplaceSearchTree(boolean replaceSearchTree) { + this.replaceSearchTree = replaceSearchTree; + } + + public int getMaxClassDescriptionTests() { + return maxClassDescriptionTests; + } + + public void setMaxClassDescriptionTests(int maxClassDescriptionTests) { + this.maxClassDescriptionTests = maxClassDescriptionTests; + } + + public int getMaxExecutionTimeInSeconds() { + return maxExecutionTimeInSeconds; + } + + public void setMaxExecutionTimeInSeconds(int maxExecutionTimeInSeconds) { + this.maxExecutionTimeInSeconds = maxExecutionTimeInSeconds; + } + + public boolean isTerminateOnNoiseReached() { + return terminateOnNoiseReached; + } + + public void setTerminateOnNoiseReached(boolean terminateOnNoiseReached) { + this.terminateOnNoiseReached = terminateOnNoiseReached; + } + + public boolean isReuseExistingDescription() { + return reuseExistingDescription; + } + + public void setReuseExistingDescription(boolean reuseExistingDescription) { + this.reuseExistingDescription = reuseExistingDescription; } + + } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyOEHeuristicRuntime.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyOEHeuristicRuntime.java 2011-08-30 12:13:54 UTC (rev 3170) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyOEHeuristicRuntime.java 2011-08-30 15:17:02 UTC (rev 3171) @@ -44,8 +44,8 @@ // syntactic comparison as final comparison criterion private ConceptComparator conceptComparator = new ConceptComparator(); - public FuzzyOEHeuristicRuntime(FuzzyCELOEConfigurator configurator) { - expansionPenaltyFactor = configurator.getExpansionPenaltyFactor(); + public FuzzyOEHeuristicRuntime() { +// expansionPenaltyFactor = configurator.getExpansionPenaltyFactor(); } @Override @@ -81,7 +81,29 @@ return score; } + public double getExpansionPenaltyFactor() { return expansionPenaltyFactor; + } + + public double getGainBonusFactor() { + return gainBonusFactor; + } + + public void setGainBonusFactor(double gainBonusFactor) { + this.gainBonusFactor = gainBonusFactor; + } + + public double getNodeRefinementPenalty() { + return nodeRefinementPenalty; + } + + public void setNodeRefinementPenalty(double nodeRefinementPenalty) { + this.nodeRefinementPenalty = nodeRefinementPenalty; + } + + public void setExpansionPenaltyFactor(double expansionPenaltyFactor) { + this.expansionPenaltyFactor = expansionPenaltyFactor; } + } Copied: trunk/components-core/src/main/java/org/dllearner/refinementoperators/FuzzyRhoDRDown.java (from rev 3167, trunk/components-core/src/main/java/org/dllearner/refinementoperators/fuzzydll/FuzzyRhoDRDown.java) =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/FuzzyRhoDRDown.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/FuzzyRhoDRDown.java 2011-08-30 15:17:02 UTC (rev 3171) @@ -0,0 +1,1673 @@ +/** + * Copyright (C) 2007-2011, Jens Lehmann + * + * This file is part of DL-Learner. 
+ * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package org.dllearner.refinementoperators; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeMap; +import java.util.TreeSet; + +import org.apache.log4j.Logger; +import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.config.BooleanEditor; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.options.CommonConfigOptions; +import org.dllearner.core.owl.BooleanValueRestriction; +import org.dllearner.core.owl.ClassHierarchy; +import org.dllearner.core.owl.Constant; +import org.dllearner.core.owl.DataRange; +import org.dllearner.core.owl.DatatypeProperty; +import org.dllearner.core.owl.DatatypeSomeRestriction; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.DoubleMaxValue; +import org.dllearner.core.owl.DoubleMinValue; +import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.Intersection; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.core.owl.Negation; +import org.dllearner.core.owl.Nothing; +import org.dllearner.core.owl.ObjectAllRestriction; +import org.dllearner.core.owl.ObjectCardinalityRestriction; +import org.dllearner.core.owl.ObjectMaxCardinalityRestriction; +import org.dllearner.core.owl.ObjectMinCardinalityRestriction; +import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.core.owl.ObjectPropertyExpression; +import org.dllearner.core.owl.ObjectQuantorRestriction; +import org.dllearner.core.owl.ObjectSomeRestriction; +import org.dllearner.core.owl.ObjectValueRestriction; +import org.dllearner.core.owl.StringValueRestriction; +import org.dllearner.core.owl.Thing; +import org.dllearner.core.owl.Union; +import org.dllearner.core.owl.fuzzydll.FuzzyIndividual; +import org.dllearner.utilities.Helper; +import org.dllearner.utilities.owl.ConceptComparator; +import org.dllearner.utilities.owl.ConceptTransformation; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * A downward refinement operator, which makes use of domains + * and ranges of properties. The operator is currently under + * development. Its aim is to span a much "cleaner" and smaller search + * tree compared to RhoDown by omitting many class descriptions, + * which are obviously too weak, because they violate + * domain/range restrictions. Furthermore, it makes use of disjoint + * classes in the knowledge base. + * + * TODO Some of the code has moved to {@link Utility} in a modified + * form to make it accessible for implementations of other refinement + * operators. These utility methods may be completed and carefully + * integrated back later. 
+ * + * @author Jens Lehmann + * + */ +public class FuzzyRhoDRDown extends RefinementOperatorAdapter { + + private static Logger logger = Logger + .getLogger(FuzzyRhoDRDown.class); + + private AbstractReasonerComponent reasoner; + + // hierarchies + private ClassHierarchy subHierarchy; + + // domains and ranges + private Map<ObjectProperty,Description> opDomains = new TreeMap<ObjectProperty,Description>(); + private Map<DatatypeProperty,Description> dpDomains = new TreeMap<DatatypeProperty,Description>(); + private Map<ObjectProperty,Description> opRanges = new TreeMap<ObjectProperty,Description>(); + + // maximum number of fillers for eeach role + private Map<ObjectProperty,Integer> maxNrOfFillers = new TreeMap<ObjectProperty,Integer>(); + // limit for cardinality restrictions (this makes sense if we e.g. have compounds with up to + // more than 200 atoms but we are only interested in atoms with certain characteristics and do + // not want something like e.g. >= 204 hasAtom.NOT Carbon-87; which blows up the search space + private int cardinalityLimit = 5; + + // start concept (can be used to start from an arbitrary concept, needs + // to be Thing or NamedClass), note that when you use e.g. Compound as + // start class, then the algorithm should start the search with class + // Compound (and not with Thing), because otherwise concepts like + // NOT Carbon-87 will be returned which itself is not a subclass of Compound + private Description startClass = new Thing(); + + // the length of concepts of top refinements, the first values is + // for refinements of \rho_\top(\top), the second one for \rho_A(\top) + private int topRefinementsLength = 0; + private Map<NamedClass, Integer> topARefinementsLength = new TreeMap<NamedClass, Integer>(); + // M is finite and this value is the maximum length of any value in M + private static int mMaxLength = 4; + + // the sets M_\top and M_A + private Map<Integer,SortedSet<Description>> m = new TreeMap<Integer,SortedSet<Description>>(); + private Map<NamedClass,Map<Integer,SortedSet<Description>>> mA = new TreeMap<NamedClass,Map<Integer,SortedSet<Description>>>(); + + // @see MathOperations.getCombos + private Map<Integer, List<List<Integer>>> combos = new HashMap<Integer, List<List<Integer>>>(); + + // refinements of the top concept ordered by length + private Map<Integer, SortedSet<Description>> topRefinements = new TreeMap<Integer, SortedSet<Description>>(); + private Map<NamedClass,Map<Integer, SortedSet<Description>>> topARefinements = new TreeMap<NamedClass,Map<Integer, SortedSet<Description>>>(); + + // cumulated refinements of top (all from length one to the specified length) + private Map<Integer, TreeSet<Description>> topRefinementsCumulative = new HashMap<Integer, TreeSet<Description>>(); + private Map<NamedClass,Map<Integer, TreeSet<Description>>> topARefinementsCumulative = new TreeMap<NamedClass,Map<Integer, TreeSet<Description>>>(); + + // app_A set of applicable properties for a given class (separate for + // object properties, boolean datatypes, and double datatypes) + private Map<NamedClass, Set<ObjectProperty>> appOP = new TreeMap<NamedClass, Set<ObjectProperty>>(); + private Map<NamedClass, Set<DatatypeProperty>> appBD = new TreeMap<NamedClass, Set<DatatypeProperty>>(); + private Map<NamedClass, Set<DatatypeProperty>> appDD = new TreeMap<NamedClass, Set<DatatypeProperty>>(); + + // most general applicable properties + private Map<NamedClass,Set<ObjectProperty>> mgr = new TreeMap<NamedClass,Set<ObjectProperty>>(); + private 
Map<NamedClass,Set<DatatypeProperty>> mgbd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgdd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgsd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + + // concept comparator + private ConceptComparator conceptComparator = new ConceptComparator(); + + // splits for double datatype properties in ascening order + private Map<DatatypeProperty,List<Double>> splits = new TreeMap<DatatypeProperty,List<Double>>(); + private int maxNrOfSplits = 10; + + // data structure for a simple frequent pattern matching preprocessing phase + private int frequencyThreshold = CommonConfigOptions.valueFrequencyThresholdDefault; + private Map<ObjectProperty, Map<Individual, Integer>> valueFrequency = new HashMap<ObjectProperty, Map<Individual, Integer>>(); + // data structure with identified frequent values + private Map<ObjectProperty, Set<Individual>> frequentValues = new HashMap<ObjectProperty, Set<Individual>>(); + // frequent data values + private Map<DatatypeProperty, Set<Constant>> frequentDataValues = new HashMap<DatatypeProperty, Set<Constant>>(); + private Map<DatatypeProperty, Map<Constant, Integer>> dataValueFrequency = new HashMap<DatatypeProperty, Map<Constant, Integer>>(); + private boolean useDataHasValueConstructor = false; + + // staistics + public long mComputationTimeNs = 0; + public long topComputationTimeNs = 0; + + @ConfigOption(name = "applyAllFilter", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean applyAllFilter = true; + + @ConfigOption(name = "applyExistsFilter", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean applyExistsFilter = true; + + @ConfigOption(name = "useAllConstructor", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean useAllConstructor = true; + + @ConfigOption(name = "useExistsConstructor", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean useExistsConstructor = true; + + @ConfigOption(name = "useHasValueConstructor", defaultValue="false", propertyEditorClass = BooleanEditor.class) + private boolean useHasValueConstructor = false; + + @ConfigOption(name = "useCardinalityRestrictions", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean useCardinalityRestrictions = true; + + @ConfigOption(name = "useNegation", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean useNegation = true; + + @ConfigOption(name = "useBooleanDatatypes", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean useBooleanDatatypes = true; + + @ConfigOption(name = "useDoubleDatatypes", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean useDoubleDatatypes = true; + + @ConfigOption(name = "useStringDatatypes", defaultValue="false", propertyEditorClass = BooleanEditor.class) + private boolean useStringDatatypes = false; + + @ConfigOption(name = "disjointChecks", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean disjointChecks = true; + + @ConfigOption(name = "instanceBasedDisjoints", defaultValue="true", propertyEditorClass = BooleanEditor.class) + private boolean instanceBasedDisjoints = true; + + @ConfigOption(name = "dropDisjuncts", defaultValue="false", propertyEditorClass = BooleanEditor.class) + private boolean dropDisjuncts = false; + + // caches for reasoner queries + 
private Map<Description,Map<Description,Boolean>> cachedDisjoints = new TreeMap<Description,Map<Description,Boolean>>(conceptComparator); + +// private Map<NamedClass,Map<NamedClass,Boolean>> abDisjoint = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); +// private Map<NamedClass,Map<NamedClass,Boolean>> notABDisjoint = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); +// private Map<NamedClass,Map<NamedClass,Boolean>> notABMeaningful = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); + + public FuzzyRhoDRDown(AbstractReasonerComponent reasoningService) { +// this(reasoningService, reasoningService.getClassHierarchy(), null, true, true, true, true, true, 3, true, true, true, true, null); + this.reasoner = reasoningService; + this.subHierarchy = reasoner.getClassHierarchy(); + init(); + } + +// public FuzzyRhoDRDown(AbstractReasonerComponent reasoner, ClassHierarchy subHierarchy, Description startClass, RefinementOperatorConfigurator configurator) { +// this.rs = reasoner; +// this.subHierarchy = subHierarchy; +// this.startClass = startClass; +// useAllConstructor = configurator.getUseAllConstructor(); +// useExistsConstructor = configurator.getUseExistsConstructor(); +// useHasValueConstructor = configurator.getUseHasValueConstructor(); +// setUseDataHasValueConstructor(configurator.getUseDataHasValueConstructor()); +// frequencyThreshold = configurator.getValueFrequencyThreshold(); +// useCardinalityRestrictions = configurator.getUseCardinalityRestrictions(); +// cardinalityLimit = configurator.getCardinalityLimit(); +// useNegation = configurator.getUseNegation(); +// useBooleanDatatypes = configurator.getUseBooleanDatatypes(); +// useDoubleDatatypes = configurator.getUseDoubleDatatypes(); +// useStringDatatypes = configurator.getUseStringDatatypes(); +// init(); +// } +// + + // the goal is to use the configurator system while still being flexible enough to + // use one refinement operator in several learning algorithms + public FuzzyRhoDRDown(AbstractReasonerComponent reasoningService, ClassHierarchy subHierarchy, int cardinalityLimit, boolean useHasValueConstructor, boolean useStringDatatypes, boolean instanceBasedDisjoints, boolean applyAllFilter, boolean applyExistsFilter, boolean useAllConstructor, + boolean useExistsConstructor, int valueFrequencyThreshold, boolean useCardinalityRestrictions,boolean useNegation, boolean useBooleanDatatypes, boolean useDoubleDatatypes, NamedClass startClass) { + this.reasoner = reasoningService; + this.subHierarchy = subHierarchy; + this.applyAllFilter = applyAllFilter; + this.applyExistsFilter = applyExistsFilter; + this.useAllConstructor = useAllConstructor; + this.useExistsConstructor = useExistsConstructor; + this.useHasValueConstructor = useHasValueConstructor; + this.frequencyThreshold = valueFrequencyThreshold; + this.useCardinalityRestrictions = useCardinalityRestrictions; + this.cardinalityLimit = cardinalityLimit; + this.useNegation = useNegation; + this.useBooleanDatatypes = useBooleanDatatypes; + this.useDoubleDatatypes = useDoubleDatatypes; + this.useStringDatatypes = useStringDatatypes; + this.instanceBasedDisjoints = instanceBasedDisjoints; + if(startClass != null) { + this.startClass = startClass; + } + init(); + } + +// subHierarchy = rs.getClassHierarchy(); + public void init() { + // query reasoner for domains and ranges + // (because they are used often in the operator) + for(ObjectProperty op : reasoner.getObjectProperties()) { + opDomains.put(op, reasoner.getDomain(op)); + opRanges.put(op, reasoner.getRange(op)); + + 
if(useHasValueConstructor) { + // init + Map<Individual, Integer> opMap = new TreeMap<Individual, Integer>(); + valueFrequency.put(op, opMap); + + // sets ordered by corresponding individual (which we ignore) + Collection<SortedSet<Individual>> fillerSets = reasoner.getPropertyMembers(op).values(); + for(SortedSet<Individual> fillerSet : fillerSets) { + for(Individual i : fillerSet) { +// System.out.println("op " + op + " i " + i); + Integer value = opMap.get(i); + + if(value != null) { + opMap.put(i, value+1); + } else { + opMap.put(i, 1); + } + } + } + + // keep only frequent patterns + Set<Individual> frequentInds = new TreeSet<Individual>(); + for(Individual i : opMap.keySet()) { + if(opMap.get(i) >= frequencyThreshold) { + frequentInds.add(i); +// break; + } + } + frequentValues.put(op, frequentInds); + + } + + } + + for(DatatypeProperty dp : reasoner.getDatatypeProperties()) { + dpDomains.put(dp, reasoner.getDomain(dp)); + + if(useDataHasValueConstructor) { + Map<Constant, Integer> dpMap = new TreeMap<Constant, Integer>(); + dataValueFrequency.put(dp, dpMap); + + // sets ordered by corresponding individual (which we ignore) + Collection<SortedSet<Constant>> fillerSets = reasoner.getDatatypeMembers(dp).values(); + for(SortedSet<Constant> fillerSet : fillerSets) { + for(Constant i : fillerSet) { +// System.out.println("op " + op + " i " + i); + Integer value = dpMap.get(i); + + if(value != null) { + dpMap.put(i, value+1); + } else { + dpMap.put(i, 1); + } + } + } + + // keep only frequent patterns + Set<Constant> frequentInds = new TreeSet<Constant>(); + for(Constant i : dpMap.keySet()) { + if(dpMap.get(i) >= frequencyThreshold) { + logger.trace("adding value "+i+", because "+dpMap.get(i) +">="+frequencyThreshold); + frequentInds.add(i); + } + } + frequentDataValues.put(dp, frequentInds); + } + } + + // we do not need the temporary set anymore and let the + // garbage collector take care of it + valueFrequency = null; + dataValueFrequency = null; + + // compute splits for double datatype properties + for(DatatypeProperty dp : reasoner.getDoubleDatatypeProperties()) { + computeSplits(dp); + } + + // determine the maximum number of fillers for each role + // (up to a specified cardinality maximum) + if(useCardinalityRestrictions) { + for(ObjectProperty op : reasoner.getObjectProperties()) { + int maxFillers = 0; + Map<Individual,SortedSet<Individual>> opMembers = reasoner.getPropertyMembers(op); + for(SortedSet<Individual> inds : opMembers.values()) { + if(inds.size()>maxFillers) + maxFillers = inds.size(); + if(maxFillers >= cardinalityLimit) { + maxFillers = cardinalityLimit; + break; + } + } + maxNrOfFillers.put(op, maxFillers); + } + } + + /* + String conceptStr = "(\"http://dl-learner.org/carcinogenesis#Compound\" AND (>= 2 \"http://dl-learner.org/carcinogenesis#hasStructure\".\"http://dl-learner.org/carcinogenesis#Ar_halide\" OR ((\"http://dl-learner.org/carcinogenesis#amesTestPositive\" IS TRUE) AND >= 5 \"http://dl-learner.org/carcinogenesis#hasBond\". 
TOP)))"; + try { + NamedClass struc = new NamedClass("http://dl-learner.org/carcinogenesis#Compound"); + Description d = KBParser.parseConcept(conceptStr); + SortedSet<Description> ds = (SortedSet<Description>) refine(d,15,null,struc); + System.out.println(ds); + + Individual i = new Individual("http://dl-learner.org/carcinogenesis#d101"); + rs.instanceCheck(ds.first(), i); + + } catch (ParseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + System.exit(0); + */ + + /* + NamedClass struc = new NamedClass("http://dl-learner.org/carcinogenesis#Atom"); + ObjectProperty op = new ObjectProperty("http://dl-learner.org/carcinogenesis#hasAtom"); + ObjectSomeRestriction oar = new ObjectSomeRestriction(op,Thing.instance); + + Set<Description> ds = refine(Thing.instance,3,null,struc); +// Set<Description> improper = new HashSet<Description>(); + for(Description d : ds) { +// if(rs.subsumes(d, struc)) { +// improper.add(d); + System.out.println(d); +// } + } + System.out.println(ds.size()); +// System.out.println(improper.size()); + System.exit(0); + */ + + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.refinement.RefinementOperator#refine(org.dllearner.core.owl.Description) + */ + @Override + public Set<Description> refine(Description concept) { + throw new RuntimeException(); + } + + @Override + public Set<Description> refine(Description description, int maxLength) { + // check that maxLength is valid + if(maxLength < description.getLength()) { + throw new Error("length has to be at least description length (description: " + description + ", max length: " + maxLength + ")"); + } + return refine(description, maxLength, null, startClass); + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.refinement.RefinementOperator#refine(org.dllearner.core.owl.Description, int, java.util.List) + */ + @Override + public Set<Description> refine(Description description, int maxLength, + List<Description> knownRefinements) { + return refine(description, maxLength, knownRefinements, startClass); + } + + @SuppressWarnings({"unchecked"}) + public Set<Description> refine(Description description, int maxLength, + List<Description> knownRefinements, Description currDomain) { + +// System.out.println("|- " + description + " " + currDomain + " " + maxLength); + + // actions needing to be performed if this is the first time the + // current domain is used + if(!(currDomain instanceof Thing) && !topARefinementsLength.containsKey(currDomain)) + topARefinementsLength.put((NamedClass)currDomain, 0); + + // check whether using list or set makes more sense + // here; and whether HashSet or TreeSet should be used + // => TreeSet because duplicates are possible + Set<Description> refinements = new TreeSet<Description>(conceptComparator); + + // used as temporary variable + Set<Description> tmp = new HashSet<Description>(); + + if(description instanceof Thing) { + // extends top refinements if necessary + if(currDomain instanceof Thing) { + if(maxLength>topRefinementsLength) + computeTopRefinements(maxLength); + refinements = (TreeSet<Description>) topRefinementsCumulative.get(maxLength).clone(); + } else { + if(maxLength>topARefinementsLength.get(currDomain)) { + computeTopRefinements(maxLength, (NamedClass) currDomain); + } + refinements = (TreeSet<Description>) topARefinementsCumulative.get(currDomain).get(maxLength).clone(); + } +// refinements.addAll(subHierarchy.getMoreSpecialConcepts(description)); + } else if(description instanceof Nothing) { + // cannot be further refined + } 
else if(description instanceof NamedClass) { + refinements.addAll(subHierarchy.getSubClasses(description)); + refinements.remove(new Nothing()); + } else if (description instanceof Negation && description.getChild(0) instanceof NamedClass) { + + tmp = subHierarchy.getSuperClasses(description.getChild(0)); + + for(Description c : tmp) { + if(!(c instanceof Thing)) + refinements.add(new Negation(c)); + } + + } else if (description instanceof Intersection) { + + // refine one of the elements + for(Description child : description.getChildren()) { + + // refine the child; the new max length is the current max length minus + // the currently considered concept plus the length of the child + // TODO: add better explanation + tmp = refine(child, maxLength - description.getLength()+child.getLength(),null,currDomain); + + // create new intersection + for(Description c : tmp) { + List<Description> newChildren = (List<Description>)((LinkedList<Description>)description.getChildren()).clone(); + newChildren.add(c); + newChildren.remove(child); + Intersection mc = new Intersection(newChildren); + + // clean concept and transform it to ordered negation normal form + // (non-recursive variant because only depth 1 was modified) + ConceptTransformation.cleanConceptNonRecursive(mc); + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(mc, conceptComparator); + + // check whether the intersection is OK (sanity checks), then add it + if(checkIntersection(mc)) + refinements.add(mc); + } + + } + + } else if (description instanceof Union) { + // refine one of the elements + for(Description child : description.getChildren()) { + +// System.out.println("union child: " + child + " " + maxLength + " " + description.getLength() + " " + child.getLength()); + + // refine child + tmp = refine(child, maxLength - description.getLength()+child.getLength(),null,currDomain); + + // construct intersection (see above) + for(Description c : tmp) { + List<Description> newChildren = new LinkedList<Description>(description.getChildren()); + newChildren.remove(child); + newChildren.add(c); + Union md = new Union(newChildren); + + // transform to ordered negation normal form + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(md, conceptComparator); + // note that we do not have to call clean here because a disjunction will + // never be nested in another disjunction in this operator + + refinements.add(md); + } + + } + + // if enabled, we can remove elements of the disjunction + if(dropDisjuncts) { + // A1 OR A2 => {A1,A2} + if(description.getChildren().size() == 2) { + refinements.add(description.getChild(0)); + refinements.add(description.getChild(1)); + } else { + // copy children list and remove a different element in each turn + for(int i=0; i<description.getChildren().size(); i++) { + List<Description> newChildren = new LinkedList<Description>(description.getChildren()); + newChildren.remove(i); + Union md = new Union(newChildren); + refinements.add(md); + } + } + } + + } else if (description instanceof ObjectSomeRestriction) { + ObjectPropertyExpression role = ((ObjectQuantorRestriction)description).getRole(); + Description range = opRanges.get(role); + + // rule 1: EXISTS r.D => EXISTS r.E + tmp = refine(description.getChild(0), maxLength-2, null, range); + + for(Description c : tmp) + refinements.add(new ObjectSomeRestriction(((ObjectQuantorRestriction)description).getRole(),c)); + + // rule 2: EXISTS r.D => EXISTS s.D or EXISTS r^-1.D => EXISTS s^-1.D + // currently inverse roles are 
not supported + ObjectProperty ar = (ObjectProperty) role; + Set<ObjectProperty> moreSpecialRoles = reasoner.getSubProperties(ar); + for(ObjectProperty moreSpecialRole : moreSpecialRoles) + refinements.add(new ObjectSomeRestriction(moreSpecialRole, description.getChild(0))); + + // rule 3: EXISTS r.D => >= 2 r.D + // (length increases by 1 so we have to check whether max length is sufficient) + if(useCardinalityRestrictions) { + if(maxLength > description.getLength() && maxNrOfFillers.get(ar)>1) { + ObjectMinCardinalityRestriction min = new ObjectMinCardinalityRestriction(2,role,description.getChild(0)); + refinements.add(min); + } + } + + // rule 4: EXISTS r.TOP => EXISTS r.{value} + if(useHasValueConstructor && description.getChild(0) instanceof Thing) { + // watch out for frequent patterns + Set<Individual> frequentInds = frequentValues.get(role); + if(frequentInds != null) { + for(Individual ind : frequentInds) { + ObjectValueRestriction ovr = new ObjectValueRestriction((ObjectProperty)role, ind); + refinements.add(ovr); + } + } + } + + } else if (description instanceof ObjectAllRestriction) { + ObjectPropertyExpression role = ((ObjectQuantorRestriction)description).getRole(); + Description range = opRanges.get(role); + + // rule 1: ALL r.D => ALL r.E + tmp = refine(description.getChild(0), maxLength-2, null, range); + + for(Description c : tmp) { + refinements.add(new ObjectAllRestriction(((ObjectQuantorRestriction)description).getRole(),c)); + } + + // rule 2: ALL r.D => ALL r.BOTTOM if D is a most specific atomic concept + if(description.getChild(0) instanceof NamedClass && tmp.size()==0) { + refinements.add(new ObjectAllRestriction(((ObjectQuantorRestriction)description).getRole(),new Nothing())); + } + + // rule 3: ALL r.D => ALL s.D or ALL r^-1.D => ALL s^-1.D + // currently inverse roles are not supported + ObjectProperty ar = (ObjectProperty) role; + Set<ObjectProperty> moreSpecialRoles = reasoner.getSubProperties(ar); + for(ObjectProperty moreSpecialRole : moreSpecialRoles) { + refinements.add(new ObjectAllRestriction(moreSpecialRole, description.getChild(0))); + } + + // rule 4: ALL r.D => <= (maxFillers-1) r.D + // (length increases by 1 so we have to check whether max length is sufficient) + // => commented out because this is acutally not a downward refinement +// if(useCardinalityRestrictions) { +// if(maxLength > description.getLength() && maxNrOfFillers.get(ar)>1) { +// ObjectMaxCardinalityRestriction max = new ObjectMaxCardinalityRestriction(maxNrOfFillers.get(ar)-1,role,description.getChild(0)); +// refinements.add(max); +// } +// } + } else if (description instanceof ObjectCardinalityRestriction) { + ObjectPropertyExpression role = ((ObjectCardinalityRestriction)description).getRole(); + Description range = opRanges.get(role); + int number = ((ObjectCardinalityRestriction)description).getCardinality(); + if(description instanceof ObjectMaxCardinalityRestriction) { + // rule 1: <= x r.C => <= x r.D + tmp = refine(description.getChild(0), maxLength-3, null, range); + + for(Description d : tmp) { + refinements.add(new ObjectMaxCardinalityRestriction(number,role,d)); + } + + // rule 2: <= x r.C => <= (x-1) r.C + ObjectMaxCardinalityRestriction max = (ObjectMaxCardinalityRestriction) description; +// int number = max.getNumber(); + if(number > 1) + refinements.add(new ObjectMaxCardinalityRestriction(number-1,max.getRole(),max.getChild(0))); + + } else if(description instanceof ObjectMinCardinalityRestriction) { + tmp = refine(description.getChild(0), maxLength-3, null, 
range); + + for(Description d : tmp) { + refinements.add(new ObjectMinCardinalityRestriction(number,role,d)); + } + + // >= x r.C => >= (x+1) r.C + ObjectMinCardinalityRestriction min = (ObjectMinCardinalityRestriction) description; +// int number = min.getNumber(); + if(number < maxNrOfFillers.get(min.getRole())) + refinements.add(new ObjectMinCardinalityRestriction(number+1,min.getRole(),min.getChild(0))); + } + } else if (description instanceof DatatypeSomeRestriction) { + + DatatypeSomeRestriction dsr = (DatatypeSomeRestriction) description; + DatatypeProperty dp = (DatatypeProperty) dsr.getRestrictedPropertyExpression(); + DataRange dr = dsr.getDataRange(); + if(dr instanceof DoubleMaxValue) { + double value = ((DoubleMaxValue)dr).getValue(); + // find out which split value was used + int splitIndex = splits.get(dp).lastIndexOf(value); + if(splitIndex == -1) + throw new Error("split error"); + int newSplitIndex = splitIndex - 1; + if(newSplitIndex >= 0) { + DoubleMaxValue max = new DoubleMaxValue(splits.get(dp).get(newSplitIndex)); + DatatypeSomeRestriction newDSR = new DatatypeSomeRestriction(dp,max); + refinements.add(newDSR); +// System.out.println(description + " => " + newDSR); + } + } else if(dr instanceof DoubleMinValue) { + double value = ((DoubleMinValue)dr).getValue(); + // find out which split value was used + int splitIndex = splits.get(dp).lastIndexOf(value); + if(splitIndex == -1) + throw new Error("split error"); + int newSplitIndex = splitIndex + 1; + if(newSplitIndex < splits.get(dp).size()) { + DoubleMinValue min = new DoubleMinValue(splits.get(dp).get(newSplitIndex)); + DatatypeSomeRestriction newDSR = new DatatypeSomeRestriction(dp,min); + refinements.add(newDSR); + } + } + } else if (description instanceof StringValueRestriction) { + StringValueRestriction svr = (StringValueRestriction) description; + DatatypeProperty dp = svr.getRestrictedPropertyExpression(); + Set<DatatypeProperty> subDPs = reasoner.getSubProperties(dp); + for(DatatypeProperty subDP : subDPs) { + refinements.add(new StringValueRestriction(subDP, svr.getStringValue())); + } + } + + // if a refinement is not Bottom, Top, ALL r.Bottom a refinement of top can be appended + if(!(description instanceof Thing) && !(description instanceof Nothing) + && !(description instanceof ObjectAllRestriction && description.getChild(0) instanceof Nothing)) { + // -1 because of the AND symbol which is appended + int topRefLength = maxLength - description.getLength() - 1; + + // maybe we have to compute new top refinements here + if(currDomain instanceof Thing) { + if(topRefLength > topRefinementsLength) + computeTopRefinements(topRefLength); + } else if(topRefLength > topARefinementsLength.get(currDomain)) + computeTopRefinements(topRefLength,(NamedClass)currDomain); + + if(topRefLength>0) { + Set<Description> topRefs; + if(currDomain instanceof Thing) + topRefs = topRefinementsCumulative.get(topRefLength); + else + topRefs = topARefinementsCumulative.get(currDomain).get(topRefLength); + + for(Description c : topRefs) { + // true if refinement should be skipped due to filters, + // false otherwise + boolean skip = false; + + // if a refinement of of the form ALL r, we check whether ALL r + // does not occur already + if(applyAllFilter) { + if(c instanceof ObjectAllRestriction) { + for(Description child : description.getChildren()) { + if(child instanceof ObjectAllRestriction) { + ObjectPropertyExpression r1 = ((ObjectAllRestriction)c).getRole(); + ObjectPropertyExpression r2 = 
((ObjectAllRestriction)child).getRole(); + if(r1.toString().equals(r2.toString())) + skip = true; + } + } + } + } + + // check for double datatype properties + /* + if(c instanceof DatatypeSomeRestriction && + description instanceof DatatypeSomeRestriction) { + DataRange dr = ((DatatypeSomeRestriction)c).getDataRange(); + DataRange dr2 = ((DatatypeSomeRestriction)description).getDataRange(); + // it does not make sense to have statements like height >= 1.8 AND height >= 1.7 + if((dr instanceof DoubleMaxValue && dr2 instanceof DoubleMaxValue) + ||(dr instanceof DoubleMinValue && dr2 instanceof DoubleMinValue)) + skip = true; + }*/ + + // perform a disjointness check when named classes are added; + // this can avoid a lot of superfluous computation in the algorithm e.g. + // when A1 looks good, so many refinements of the form (A1 OR (A2 AND A3)) + // are generated which are all equal to A1 due to disjointness of A2 and A3 + if(disjointChecks && c instanceof NamedClass && description instanceof NamedClass && isDisjoint(description, c)) { + skip = true; +// System.out.println(c + " ignored when refining " + description); + } + + if(!skip) { + Intersection mc = new Intersection(); + mc.addChild(description); + mc.addChild(c); + + // clean and transform to ordered negation normal form + ConceptTransformation.cleanConceptNonRecursive(mc); + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(mc, conceptComparator); + + // last check before intersection is added + if(checkIntersection(mc)) + refinements.add(mc); + } + } + } + } + +// for(Description refinement : refinements) { +// if((refinement instanceof Intersection || refinement instanceof Union) && refinement.getChildren().size()<2) { +// System.out.println(description + " " + refinement + " " + currDomain + " " + maxLength); +// System.exit(0); +// } +// } + + return refinements; + } + + // when a child of an intersection is refined and reintegrated into the + // intersection, we can perform some sanity checks; + // method returns true if everything is OK and false otherwise + // TODO: can be implemented more efficiently if the newly added child + // is given as parameter + public static boolean checkIntersection(Intersection intersection) { + // rule 1: max. 
restrictions at most once + boolean maxDoubleOccurence = false; + // rule 2: min restrictions at most once + boolean minDoubleOccurence = false; + // rule 3: no double occurences of boolean datatypes + TreeSet<DatatypeProperty> occuredDP = new TreeSet<DatatypeProperty>(); + // rule 4: no double occurences of hasValue restrictions + TreeSet<ObjectProperty> occuredVR = new TreeSet<ObjectProperty>(); + + for(Description child : intersection.getChildren()) { + if(child instanceof DatatypeSomeRestriction) { + DataRange dr = ((DatatypeSomeRestriction)child).getDataRange(); + if(dr instanceof DoubleMaxValue) { + if(maxDoubleOccurence) + return false; + else + maxDoubleOccurence = true; + } else if(dr instanceof DoubleMinValue) { + if(minDoubleOccurence) + return false; + else + minDoubleOccurence = true; + } + } else if(child instanceof BooleanValueRestriction) { + DatatypeProperty dp = (DatatypeProperty) ((BooleanValueRestriction)child).getRestrictedPropertyExpression(); +// System.out.println("dp: " + dp); + // return false if the boolean property exists already + if(!occuredDP.add(dp)) + return false; + } else if(child instanceof ObjectValueRestriction) { + ObjectProperty op = (ObjectProperty) ((ObjectValueRestriction)child).getRestrictedPropertyExpression(); + if(!occuredVR.add(op)) + return false; + } +// System.out.println(child.getClass()); + } + return true; + } + + /** + * By default, the operator does not specialize e.g. (A or B) to A, because + * it only guarantees weak completeness. Under certain circumstances, e.g. + * refinement of a fixed given concept, it can be useful to allow such + * refinements, which can be done by passing the parameter true to this method. + * @param dropDisjuncts Whether to remove disjuncts in refinement process. + */ + public void setDropDisjuncts(boolean dropDisjuncts) { + this.dropDisjuncts = dropDisjuncts; + } + + private void computeTopRefinements(int maxLength) { + computeTopRefinements(maxLength, null); + } + + private void computeTopRefinements(int maxLength, NamedClass domain) { + long topComputationTimeStartNs = System.nanoTime(); + + if(domain == null && m.size() == 0) + computeM(); + + if(domain != null && !mA.containsKey(domain)) + computeM(domain); + + int refinementsLength; + + if(domain == null) { + refinementsLength = topRefinementsLength; + } else { + if(!topARefinementsLength.containsKey(domain)) + topARefinementsLength.put(domain,0); + + refinementsLength = topARefinementsLength.get(domain); + } + + // compute all possible combinations of the disjunction + for(int i = refinementsLength+1; i <= maxLength; i++) { + combos.put(i,MathOperations.getCombos(i, mMaxLength)); + + // initialise the refinements with empty sets + if(domain == null) { + topRefinements.put(i, new TreeSet<Description>(conceptComparator)); + } else { + if(!topARefinements.containsKey(domain)) + topARefinements.put(domain, new TreeMap<Integer,SortedSet<Description>>()); + topARefinements.get(domain).put(i, new TreeSet<Description>(conceptComparator)); + } + + for(List<Integer> combo : combos.get(i)) { + + // combination is a single number => try to use M + if(combo.size()==1) { + // note we cannot use "put" instead of "addAll" because there + // can be several combos for one length + if(domain == null) + topRefinements.get(i).addAll(m.get(i)); + else + topARefinements.get(domain).get(i).addAll(mA.get(domain).get(i)); + // combinations has several numbers => generate disjunct + } else { + + // check whether the combination makes sense, i.e. 
whether + // all lengths mentioned in it have corresponding elements + // e.g. when negation is deactivated there won't be elements of + // length 2 in M + boolean validCombo = true; + for(Integer j : combo) { + if((domain == null && m.get(j).size()==0) || + (domain != null && mA.get(domain).get(j).size()==0)) + validCombo = false; + } + + if(validCombo) { + + SortedSet<Union> baseSet = new TreeSet<Union>(conceptComparator); + for(Integer j : combo) { + if(domain == null) + baseSet = MathOperations.incCrossProduct(baseSet, m.get(j)); + else + baseSet = MathOperations.incCrossProduct(baseSet, mA.get(domain).get(j)); + } + + // convert all concepts in ordered negation normal form + for(Description concept : baseSet) { + ConceptTransformation.transformToOrderedForm(concept, conceptComparator); + } + + // apply the exists filter (throwing out all refinements with + // double \exists r for any r) + // TODO: similar filtering can be done for boolean datatype + // properties + if(applyExistsFilter) { + Iterator<Union> it = baseSet.iterator(); + while(it.hasNext()) { + if(MathOperations.containsDoubleObjectSomeRestriction(it.next())) + it.remove(); + } + } + + // add computed refinements + if(domain == null) + topRefinements.get(i).addAll(baseSet); + else + topARefinements.get(domain).get(i).addAll(baseSet); + + } + } + } + + // create cumulative versions of refinements such that they can + // be accessed easily + TreeSet<Description> cumulativeRefinements = new TreeSet<Description>(conceptComparator); + for(int j=1; j<=i; j++) { + if(domain == null) { + cumulativeRefinements.addAll(topRefinements.get(j)); + } else { + cumulativeRefinements.addAll(topARefinements.get(domain).get(j)); + } + } + + if(domain == null) { + topRefinementsCumulative.put(i, cumulativeRefinements); + } else { + if(!topARefinementsCumulative.containsKey(domain)) + topARefinementsCumulative.put(domain, new TreeMap<Integer, TreeSet<Description>>()); + topARefinementsCumulative.get(domain).put(i, cumulativeRefinements); + } + } + + // register new top refinements length + if(domain == null) + topRefinementsLength = maxLength; + else + topARefinementsLength.put(domain,maxLength); + + topComputationTimeNs += System.nanoTime() - topComputationTimeStartNs; + } + + // compute M_\top + private void computeM() { + long mComputationTimeStartNs = System.nanoTime(); + + // initialise all possible lengths (1 to 3) + for(int i=1; i<=mMaxLength; i++) { + m.put(i, new TreeSet<Description>(conceptComparator)); + } + + SortedSet<Description> m1 = subHierarchy.getSubClasses(new Thing()); + m.put(1,m1); + + SortedSet<Description> m2 = new TreeSet<Description>(conceptComparator); + if(useNegation) { + Set<Description> m2tmp = subHierarchy.getSuperClasses(new Nothing()); + for(Description c : m2tmp) { + if(!(c instanceof Thing)) { + m2.add(new Negation(c)); + } + } + } + + // boolean datatypes, e.g. 
testPositive = true + if(useBooleanDatatypes) { + Set<DatatypeProperty> booleanDPs = reasoner.getBooleanDatatypeProperties(); + for(DatatypeProperty dp : booleanDPs) { + m2.add(new BooleanValueRestriction(dp,true)); + m2.add(new BooleanValueRestriction(dp,false)); + } + } + m.put(2,m2); + + SortedSet<Description> m3 = new TreeSet<Description>(conceptComparator); + if(useExistsConstructor) { + // only uses most general roles + for(ObjectProperty r : reasoner.getMostGeneralProperties()) { + m3.add(new ObjectSomeRestriction(r, new Thing())); + } + } + + if(useAllConstructor) { + // we allow \forall r.\top here because otherwise the operator + // becomes too difficult to manage due to dependencies between + // M_A and M_A' where A'=ran(r) + for(ObjectProperty r : reasoner.getMostGeneralProperties()) { + m... [truncated message content] |
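The computeM() code above builds the basis set M_\top as a map from concept length to a set of concepts: length 1 holds the named subclasses of owl:Thing, length 2 holds negated named classes plus boolean value restrictions, and length 3 holds restrictions over the most general roles; computeTopRefinements() then combines entries whose lengths sum to the requested value into disjunctions. The following self-contained sketch mirrors that bookkeeping with plain strings instead of DL-Learner Description objects; the class name, method signature, and example inputs are illustrative only and are not part of the DL-Learner API.

import java.util.*;

/**
 * Illustrative sketch of how computeM() organises the basis set M by concept
 * length. Concepts are plain strings here; in DL-Learner they are Description
 * objects obtained from the reasoner (getSubClasses, getBooleanDatatypeProperties,
 * getMostGeneralProperties).
 */
public class MSetSketch {

    public static Map<Integer, SortedSet<String>> computeM(
            SortedSet<String> atomicClasses,      // named subclasses of owl:Thing   -> length 1
            SortedSet<String> negatableClasses,   // classes whose negation is allowed -> length 2
            SortedSet<String> booleanProperties,  // boolean datatype properties       -> length 2
            SortedSet<String> mostGeneralRoles) { // most general object properties    -> length 3

        Map<Integer, SortedSet<String>> m = new TreeMap<Integer, SortedSet<String>>();
        for (int length = 1; length <= 3; length++) {
            m.put(length, new TreeSet<String>());
        }

        // length 1: named classes directly below owl:Thing
        m.get(1).addAll(atomicClasses);

        // length 2: negated named classes (when negation is enabled) ...
        for (String c : negatableClasses) {
            m.get(2).add("NOT " + c);
        }
        // ... and boolean value restrictions, e.g. testPositive = true
        for (String dp : booleanProperties) {
            m.get(2).add(dp + " = true");
            m.get(2).add(dp + " = false");
        }

        // length 3: existential (and universal) restrictions over the most general roles
        for (String r : mostGeneralRoles) {
            m.get(3).add("EXISTS " + r + ".TOP");
            m.get(3).add("ALL " + r + ".TOP");
        }
        return m;
    }

    public static void main(String[] args) {
        Map<Integer, SortedSet<String>> m = computeM(
                new TreeSet<String>(Arrays.asList("Person", "Publication")),
                new TreeSet<String>(Arrays.asList("Person", "Publication")),
                new TreeSet<String>(Collections.singletonList("testPositive")),
                new TreeSet<String>(Collections.singletonList("hasAuthor")));
        // computeTopRefinements() would now union members of these sets into
        // disjunctions whose summed lengths match the requested refinement length
        System.out.println(m);
    }
}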
From: <lor...@us...> - 2011-09-21 21:49:17
Revision: 3281 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3281&view=rev Author: lorenz_b Date: 2011-09-21 21:49:10 +0000 (Wed, 21 Sep 2011) Log Message: ----------- Made some small changes for enrichment in ORE. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java trunk/components-core/src/main/java/org/dllearner/core/owl/DisjointClassesAxiom.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIAxiomConvertVisitor.java Added Paths: ----------- trunk/components-core/src/test/java/org/dllearner/test/junit/DisjointClassesLearningTest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-09-20 09:55:53 UTC (rev 3280) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2011-09-21 21:49:10 UTC (rev 3281) @@ -21,11 +21,11 @@ import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.SortedSet; import java.util.TreeSet; import org.dllearner.core.AbstractAxiomLearningAlgorithm; @@ -186,9 +186,9 @@ @Override public List<EvaluatedAxiom> getCurrentlyBestEvaluatedAxioms(int nrOfAxioms) { List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); - Set<Description> descriptions; + List<Description> descriptions; for(EvaluatedDescription ed : getCurrentlyBestEvaluatedDescriptions(nrOfAxioms)){ - descriptions = new HashSet<Description>(); + descriptions = new ArrayList<Description>(); descriptions.add(classToDescribe); descriptions.add(ed.getDescription()); axioms.add(new EvaluatedAxiom(new DisjointClassesAxiom(descriptions), new AxiomScore(ed.getAccuracy()))); @@ -209,6 +209,9 @@ EvaluatedDescription evalDesc; //firstly, create disjoint classexpressions which not occur and give score of 1 + if(reasoner.isPrepared()){ + SortedSet<Description> mostGeneralClasses = reasoner.getClassHierarchy().getMostGeneralClasses(); + } for(NamedClass cls : completeDisjointclasses){ evalDesc = new EvaluatedDescription(cls, new AxiomScore(1)); evalDescs.add(evalDesc); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2011-09-20 09:55:53 UTC (rev 3280) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2011-09-21 21:49:10 UTC (rev 3281) @@ -208,7 +208,9 @@ SPARQLReasoner reasoner = new SPARQLReasoner(ks); reasoner.prepareSubsumptionHierarchy(); + System.out.println(reasoner.getClassHierarchy().getSubClasses(Thing.instance).size()); + ObjectPropertyDomainAxiomLearner l = new ObjectPropertyDomainAxiomLearner(ks); l.setReasoner(reasoner); l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/author")); Modified: 
trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2011-09-20 09:55:53 UTC (rev 3280) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2011-09-21 21:49:10 UTC (rev 3281) @@ -356,4 +356,21 @@ } return depth; } + + public SortedSet<Description> getMostGeneralClasses(){ + SortedSet<Description> generalClasses = new TreeSet<Description>(conceptComparator); + boolean add = false; + SortedSet<Description> superClasses; + for(Description sub : getSubClasses(Thing.instance)){ + superClasses = getSuperClasses(sub); + superClasses = new TreeSet<Description>(conceptComparator);superClasses.remove(Thing.instance); + if(superClasses.isEmpty()){ + add = true; + } + if(add){ + generalClasses.add(sub); + } + } + return generalClasses; + } } Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/DisjointClassesAxiom.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/DisjointClassesAxiom.java 2011-09-20 09:55:53 UTC (rev 3280) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/DisjointClassesAxiom.java 2011-09-21 21:49:10 UTC (rev 3281) @@ -19,6 +19,7 @@ package org.dllearner.core.owl; +import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Set; @@ -33,9 +34,9 @@ * */ private static final long serialVersionUID = 7788863077013583508L; - private Set<Description> descriptions; + private Collection<Description> descriptions; - public DisjointClassesAxiom(Set<Description> descriptions) { + public DisjointClassesAxiom(Collection<Description> descriptions) { this.descriptions = descriptions; } @@ -94,7 +95,7 @@ /** * @return the descriptions */ - public Set<Description> getDescriptions() { + public Collection<Description> getDescriptions() { return descriptions; } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-09-20 09:55:53 UTC (rev 3280) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2011-09-21 21:49:10 UTC (rev 3281) @@ -106,13 +106,13 @@ // parents/children of top ... SortedSet<Description> tmp = getSubClasses(Thing.instance); - subsumptionHierarchyUp.put(Thing.instance, new TreeSet<Description>()); + subsumptionHierarchyUp.put(Thing.instance, new TreeSet<Description>(conceptComparator)); subsumptionHierarchyDown.put(Thing.instance, tmp); // ... bottom ... tmp = getSuperClasses(Nothing.instance); subsumptionHierarchyUp.put(Nothing.instance, tmp); - subsumptionHierarchyDown.put(Nothing.instance, new TreeSet<Description>()); + subsumptionHierarchyDown.put(Nothing.instance, new TreeSet<Description>(conceptComparator)); // ... 
and named classes Set<NamedClass> atomicConcepts = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); @@ -813,6 +813,10 @@ return hierarchy != null; } + public void setCache(ExtractionDBCache cache) { + this.cache = cache; + } + private boolean executeAskQuery(String query){ QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIAxiomConvertVisitor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIAxiomConvertVisitor.java 2011-09-20 09:55:53 UTC (rev 3280) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIAxiomConvertVisitor.java 2011-09-21 21:49:10 UTC (rev 3281) @@ -21,6 +21,7 @@ import static org.dllearner.utilities.owl.OWLAPIDescriptionConvertVisitor.getOWLClassExpression; +import java.util.Collection; import java.util.HashSet; import java.util.Set; @@ -377,7 +378,7 @@ * @see org.dllearner.core.owl.TerminologicalAxiomVisitor#visit(org.dllearner.core.owl.DisjointClassesAxiom) */ public void visit(DisjointClassesAxiom axiom) { - Set<Description> descriptions = axiom.getDescriptions(); + Collection<Description> descriptions = axiom.getDescriptions(); Set<OWLClassExpression> owlAPIDescriptions = new HashSet<OWLClassExpression>(); for(Description description : descriptions) owlAPIDescriptions.add(getOWLClassExpression(description)); Added: trunk/components-core/src/test/java/org/dllearner/test/junit/DisjointClassesLearningTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/DisjointClassesLearningTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/DisjointClassesLearningTest.java 2011-09-21 21:49:10 UTC (rev 3281) @@ -0,0 +1,55 @@ +package org.dllearner.test.junit; + +import junit.framework.TestCase; + +import org.dllearner.algorithms.DisjointClassesLearner; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.SparqlEndpoint; +import org.dllearner.reasoning.SPARQLReasoner; + +public class DisjointClassesLearningTest extends TestCase{ + + private SparqlEndpointKS ks; + private SPARQLReasoner reasoner; + + private static final int maxExecutionTimeInSeconds = 10; + + @Override + protected void setUp() throws Exception { + super.setUp(); + ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW()); + + reasoner = new SPARQLReasoner(ks); + reasoner.prepareSubsumptionHierarchy(); + } + + public void testLearnSingleClass(){ + DisjointClassesLearner l = new DisjointClassesLearner(ks); + l.setReasoner(reasoner); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + l.setClassToDescribe(new NamedClass("http://dbpedia.org/ontology/Book")); + + l.start(); + + System.out.println(l.getCurrentlyBestAxioms(5)); + } + + public void testLearnForMostGeneralClasses(){ + DisjointClassesLearner l = new DisjointClassesLearner(ks); + l.setReasoner(reasoner); + l.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); + + for(Description cls : reasoner.getClassHierarchy().getMostGeneralClasses()){ + l.setClassToDescribe((NamedClass)cls); + + l.start(); + + System.out.println(l.getCurrentlyBestAxioms(5)); + } + } + + + +} Property changes on: 
trunk/components-core/src/test/java/org/dllearner/test/junit/DisjointClassesLearningTest.java ___________________________________________________________________ Added: svn:mime-type + text/plain
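One detail worth flagging in the ClassHierarchy change above: getMostGeneralClasses() assigns superClasses = getSuperClasses(sub) and then immediately replaces it with a new empty TreeSet before the isEmpty() check, and the add flag is never reset inside the loop, so the method would effectively return every subclass of owl:Thing. The presumable intent is to keep only those classes whose named superclasses, apart from owl:Thing, are empty; that intent is sketched below with plain Java collections. The class and method names here are hypothetical stand-ins, not the DL-Learner API.

import java.util.*;

/**
 * Hypothetical sketch of the intent behind ClassHierarchy.getMostGeneralClasses():
 * keep only those subclasses of owl:Thing whose named superclasses, ignoring
 * owl:Thing itself, are empty. The hierarchy is modelled as a plain map from
 * class name to its named superclasses; in DL-Learner this information would
 * come from getSubClasses(Thing.instance) and getSuperClasses(...).
 */
public class MostGeneralClassesSketch {

    static final String THING = "owl:Thing";

    public static SortedSet<String> getMostGeneralClasses(Map<String, Set<String>> superClassesOf) {
        SortedSet<String> result = new TreeSet<String>();
        for (Map.Entry<String, Set<String>> entry : superClassesOf.entrySet()) {
            // copy the superclasses and drop owl:Thing, rather than replacing
            // the whole set with a fresh empty one as in the committed version
            Set<String> supers = new TreeSet<String>(entry.getValue());
            supers.remove(THING);
            if (supers.isEmpty()) {
                result.add(entry.getKey());
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, Set<String>> hierarchy = new HashMap<String, Set<String>>();
        hierarchy.put("Work", new HashSet<String>(Collections.singletonList(THING)));
        hierarchy.put("Book", new HashSet<String>(Arrays.asList(THING, "Work")));
        hierarchy.put("Person", new HashSet<String>(Collections.singletonList(THING)));
        // prints [Person, Work]; Book is excluded because it has a superclass besides owl:Thing
        System.out.println(getMostGeneralClasses(hierarchy));
    }
}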
From: <jen...@us...> - 2011-11-30 16:09:30
Revision: 3452 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3452&view=rev Author: jenslehmann Date: 2011-11-30 16:09:23 +0000 (Wed, 30 Nov 2011) Log Message: ----------- fixed another bug Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java 2011-11-30 14:14:54 UTC (rev 3451) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java 2011-11-30 16:09:23 UTC (rev 3452) @@ -354,38 +354,17 @@ // create a refinement operator and pass all configuration // variables to it -// RhoDRDown if(operator == null) { // we use a default operator and inject the class hierarchy for now operator = new RhoDRDown(); ((RhoDRDown)operator).setReasoner(reasoner); - -// operator = new RhoDRDown( -// reasoner, -// classHierarchy, -//// configurator, -// applyAllFilter, -// applyExistsFilter, -//// useAllConstructor, -// true, -//// useExistsConstructor, -// true, -// useHasValueConstructor, -// valueFrequencyThreshold, -// useCardinalityRestrictions, -// useNegation, -// useBooleanDatatypes, -// useDoubleDatatypes, -// startClass, -// cardinalityLimit, -// useStringDatatypes, -// instanceBasedDisjoints -// ); + ((RhoDRDown)operator).init(); } + // TODO: find a better solution as this is quite difficult to debug ((RhoDRDown)operator).setSubHierarchy(classHierarchy); ((RhoDRDown)operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy()); ((RhoDRDown)operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy()); - ((RhoDRDown)operator).init(); + // create an algorithm object and pass all configuration // options to it Modified: trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2011-11-30 14:14:54 UTC (rev 3451) +++ trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2011-11-30 16:09:23 UTC (rev 3452) @@ -344,5 +344,9 @@ */ public static double getVersion(Component component){ return getVersion(component.getClass()); + } + + public static boolean addComponentClassName(String e) { + return componentClassNames.add(e); } } Modified: trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-11-30 14:14:54 UTC (rev 3451) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-11-30 16:09:23 UTC (rev 3452) @@ -37,6 +37,7 @@ import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.Component; import org.dllearner.core.ComponentAnn; +import org.dllearner.core.ComponentInitException; import org.dllearner.core.config.BooleanEditor; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.options.CommonConfigOptions; @@ -224,47 +225,52 @@ // private 
Map<NamedClass,Map<NamedClass,Boolean>> notABDisjoint = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); // private Map<NamedClass,Map<NamedClass,Boolean>> notABMeaningful = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); + private boolean isInitialised = false; + public RhoDRDown() { } - public RhoDRDown(AbstractReasonerComponent reasoningService) { -// this(reasoningService, reasoningService.getClassHierarchy(), null, true, true, true, true, true, 3, true, true, true, true, null); - this.reasoner = reasoningService; - this.subHierarchy = reasoner.getClassHierarchy(); - init(); - } +// public RhoDRDown(AbstractReasonerComponent reasoningService) { +//// this(reasoningService, reasoningService.getClassHierarchy(), null, true, true, true, true, true, 3, true, true, true, true, null); +// this.reasoner = reasoningService; +// this.subHierarchy = reasoner.getClassHierarchy(); +// init(); +// } // TODO constructor which takes a RhoDRDownConfigurator object; // this should be an interface implemented e.g. by ExampleBasedROLComponentConfigurator; // the goal is to use the configurator system while still being flexible enough to // use one refinement operator in several learning algorithms - public RhoDRDown(AbstractReasonerComponent reasoningService, ClassHierarchy subHierarchy, boolean applyAllFilter, boolean applyExistsFilter, boolean useAllConstructor, - boolean useExistsConstructor, boolean useHasValueConstructor, int valueFrequencyThreshold, boolean useCardinalityRestrictions,boolean useNegation, boolean useBooleanDatatypes, boolean useDoubleDatatypes, NamedClass startClass, - int cardinalityLimit, boolean useStringDatatypes, boolean instanceBasedDisjoints) { - this.reasoner = reasoningService; - this.subHierarchy = subHierarchy; - this.applyAllFilter = applyAllFilter; - this.applyExistsFilter = applyExistsFilter; - this.useAllConstructor = useAllConstructor; - this.useExistsConstructor = useExistsConstructor; - this.useHasValueConstructor = useHasValueConstructor; - this.frequencyThreshold = valueFrequencyThreshold; - this.useCardinalityRestrictions = useCardinalityRestrictions; - this.cardinalityLimit = cardinalityLimit; - this.useNegation = useNegation; - this.useBooleanDatatypes = useBooleanDatatypes; - this.useDoubleDatatypes = useDoubleDatatypes; - this.useStringDatatypes = useStringDatatypes; - this.instanceBasedDisjoints = instanceBasedDisjoints; - if(startClass != null) { - this.startClass = startClass; - } - init(); - } +// public RhoDRDown(AbstractReasonerComponent reasoningService, ClassHierarchy subHierarchy, boolean applyAllFilter, boolean applyExistsFilter, boolean useAllConstructor, +// boolean useExistsConstructor, boolean useHasValueConstructor, int valueFrequencyThreshold, boolean useCardinalityRestrictions,boolean useNegation, boolean useBooleanDatatypes, boolean useDoubleDatatypes, NamedClass startClass, +// int cardinalityLimit, boolean useStringDatatypes, boolean instanceBasedDisjoints) { +// this.reasoner = reasoningService; +// this.subHierarchy = subHierarchy; +// this.applyAllFilter = applyAllFilter; +// this.applyExistsFilter = applyExistsFilter; +// this.useAllConstructor = useAllConstructor; +// this.useExistsConstructor = useExistsConstructor; +// this.useHasValueConstructor = useHasValueConstructor; +// this.frequencyThreshold = valueFrequencyThreshold; +// this.useCardinalityRestrictions = useCardinalityRestrictions; +// this.cardinalityLimit = cardinalityLimit; +// this.useNegation = useNegation; +// this.useBooleanDatatypes = useBooleanDatatypes; +// 
this.useDoubleDatatypes = useDoubleDatatypes; +// this.useStringDatatypes = useStringDatatypes; +// this.instanceBasedDisjoints = instanceBasedDisjoints; +// if(startClass != null) { +// this.startClass = startClass; +// } +// init(); +// } // subHierarchy = rs.getClassHierarchy(); - public void init() { + public void init() throws ComponentInitException { + if(isInitialised) { + throw new ComponentInitException("Refinement operator cannot be nitialised twice."); + } // System.out.println("subHierarchy: " + subHierarchy); // System.out.println("object properties: " + ); @@ -347,6 +353,8 @@ valueFrequency = null; dataValueFrequency = null; + System.out.println("freqDataValues: " + frequentDataValues); + // compute splits for double datatype properties for(DatatypeProperty dp : reasoner.getDoubleDatatypeProperties()) { computeSplits(dp); @@ -368,6 +376,9 @@ } maxNrOfFillers.put(op, maxFillers); } + + isInitialised = true; + } /* Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java 2011-11-30 14:14:54 UTC (rev 3451) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java 2011-11-30 16:09:23 UTC (rev 3452) @@ -88,7 +88,8 @@ // rs.prepareSubsumptionHierarchy(); // rs.prepareRoleHierarchy(); - RhoDRDown op = new RhoDRDown(rc); + RhoDRDown op = new RhoDRDown(); + op.setReasoner(rc); Description concept = KBParser.parseConcept(uri("Compound")); Set<Description> results = op.refine(concept, 4, null); @@ -113,7 +114,8 @@ AbstractReasonerComponent reasoner = TestOntologies.getTestOntology(TestOntology.EPC_OE); baseURI = reasoner.getBaseURI(); - RhoDRDown op = new RhoDRDown(reasoner); + RhoDRDown op = new RhoDRDown(); + op.setReasoner(reasoner); Description concept = KBParser.parseConcept("(\"http://localhost/aris/sap_model.owl#EPC\" AND EXISTS \"http://localhost/aris/sap_model.owl#hasModelElements\".\"http://localhost/aris/sap_model.owl#Object\")"); Set<Description> results = op.refine(concept,10); @@ -148,9 +150,8 @@ classHierarchy.thinOutSubsumptionHierarchy(); System.out.println(" UNIT TEST INCOMPLETE AFTER FRAMEWORK CHANGE, BECAUSE CLASS HIERARCHY IS NOT PASSED TO REFINEMENT OPERATOR "); - RhoDRDown op = new RhoDRDown( - reasoner // TODO: pass class hierarchy here - ); + RhoDRDown op = new RhoDRDown(); + op.setReasoner(reasoner); Description concept = KBParser.parseConcept("EXISTS \"http://www.test.de/test#hasPiece\".EXISTS \"http://www.test.de/test#hasLowerRankThan\".(\"http://www.test.de/test#WRook\" AND TOP)"); Set<Description> results = op.refine(concept,8); @@ -192,7 +193,8 @@ @Test public void rhoDRDownTest4() throws ParseException, LearningProblemUnsupportedException { AbstractReasonerComponent rs = TestOntologies.getTestOntology(TestOntology.RHO1); - RefinementOperator operator = new RhoDRDown(rs); + RefinementOperator operator = new RhoDRDown(); + ((RhoDRDown)operator).setReasoner(rs); Description concept = KBParser.parseConcept("(car AND EXISTS hasOwner.person)"); // Description concept = Thing.instance; Set<Description> refinements = operator.refine(concept, 6); @@ -204,7 +206,8 @@ @Test public void rhoDRDownTest5() throws ParseException, LearningProblemUnsupportedException { AbstractReasonerComponent rs = TestOntologies.getTestOntology(TestOntology.SWORE); - RefinementOperator operator = new RhoDRDown(rs); + RefinementOperator operator = new 
RhoDRDown(); + ((RhoDRDown)operator).setReasoner(rs); // Description concept = KBParser.parseConcept("((NOT \"http://ns.softwiki.de/req/Requirement\") OR (ALL \"http://ns.softwiki.de/req/isCreatedBy\".(NOT \"http://ns.softwiki.de/req/Creditor\")))"); Description concept = KBParser.parseConcept("(NOT \"http://ns.softwiki.de/req/Requirement\" OR ALL \"http://ns.softwiki.de/req/isCreatedBy\".NOT \"http://ns.softwiki.de/req/Creditor\")"); System.out.println(concept); @@ -217,7 +220,8 @@ @Test public void invertedOperatorTest() throws ParseException { AbstractReasonerComponent rs = TestOntologies.getTestOntology(TestOntology.RHO1); - RhoDRDown rho = new RhoDRDown(rs); + RhoDRDown rho = new RhoDRDown(); + rho.setReasoner(rs); rho.setDropDisjuncts(true); RefinementOperator operator = new OperatorInverter(rho); Description concept = KBParser.parseConcept("(limo AND EXISTS hasOwner.man)"); @@ -234,7 +238,8 @@ public void rhoDownTestPellet() { Logger.getRootLogger().setLevel(Level.TRACE); AbstractReasonerComponent rs = TestOntologies.getTestOntology(TestOntology.FATHER); - RhoDRDown rho = new RhoDRDown(rs); + RhoDRDown rho = new RhoDRDown(); + rho.setReasoner(rs); NamedClass nc = new NamedClass("http://example.com/father#male"); Set<Description> refinements = rho.refine(nc, 5); for(Description refinement : refinements) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
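With the argument-heavy RhoDRDown constructors commented out in this revision, callers are expected to follow the configure-then-init pattern that OCEL and the updated tests use: no-argument constructor, setter injection, a single init() call (init() now throws if invoked twice), and optional injection of the hierarchies afterwards. A minimal sketch of that wiring is given below, assuming an already initialised AbstractReasonerComponent is available; the helper class and method are illustrative, while the individual RhoDRDown and reasoner calls are the ones shown in the diff above.

import java.util.Set;

import org.dllearner.core.AbstractReasonerComponent;
import org.dllearner.core.ComponentInitException;
import org.dllearner.core.owl.Description;
import org.dllearner.refinementoperators.RhoDRDown;

public class RhoDRDownWiringSketch {

    /**
     * Illustrative helper showing the post-3452 set-up of the refinement operator.
     * The reasoner component is assumed to be created and initialised elsewhere.
     */
    public static Set<Description> refine(AbstractReasonerComponent reasoner,
                                          Description concept,
                                          int maxLength) throws ComponentInitException {
        RhoDRDown operator = new RhoDRDown();
        operator.setReasoner(reasoner);
        operator.init(); // may only be called once per operator instance
        // OCEL injects the hierarchies explicitly after init() (see the TODO above)
        operator.setSubHierarchy(reasoner.getClassHierarchy());
        operator.setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy());
        operator.setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy());
        return operator.refine(concept, maxLength);
    }
}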
From: <sha...@us...> - 2012-03-15 02:34:14
Revision: 3611 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3611&view=rev Author: shadowtm Date: 2012-03-15 02:34:07 +0000 (Thu, 15 Mar 2012) Log Message: ----------- Added support of the OWLAPIOntology to implement the OWLOntologyKnowledgeSource interface to provide thread safe access to its underlying ontology. Also added a convenience class in support of this which can convert an OWL Ontology to a byte array and back again. Using this we can ensure disconnection occurs from OWL Ontology Managers. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyToByteConverter.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java trunk/components-core/src/test/java/org/dllearner/kb/ trunk/components-core/src/test/java/org/dllearner/kb/OWLAPIOntologyTest.java trunk/components-core/src/test/java/org/dllearner/utilities/ trunk/components-core/src/test/java/org/dllearner/utilities/owl/ trunk/components-core/src/test/java/org/dllearner/utilities/owl/OntologyToByteConverterTest.java trunk/components-core/src/test/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverterTest.java trunk/components-core/src/test/resources/org/ trunk/components-core/src/test/resources/org/dllearner/ trunk/components-core/src/test/resources/org/dllearner/kb/ trunk/components-core/src/test/resources/org/dllearner/kb/owl-api-ontology-data.owl trunk/components-core/src/test/resources/org/dllearner/utilities/ trunk/components-core/src/test/resources/org/dllearner/utilities/owl/ trunk/components-core/src/test/resources/org/dllearner/utilities/owl/byte-conversion-data.owl Modified: trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java 2012-03-14 17:27:16 UTC (rev 3610) +++ trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -21,57 +21,45 @@ import java.io.File; import java.net.URI; -import java.util.Iterator; -import java.util.Set; import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.OntologyFormat; import org.dllearner.core.options.ConfigEntry; import org.dllearner.core.options.InvalidConfigOptionValueException; import org.dllearner.core.owl.KB; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntology; +import org.dllearner.utilities.owl.OntologyToByteConverter; +import org.dllearner.utilities.owl.SimpleOntologyToByteConverter; +import org.semanticweb.owlapi.model.*; -public class OWLAPIOntology extends AbstractKnowledgeSource { +/** + * This class provides a wrapper around a single OWL Ontology. However, due to threading issues it is not safe + * to allow access to ontologies created with an Ontology Manager which we do not control. 
+ */ +public class OWLAPIOntology extends AbstractKnowledgeSource implements OWLOntologyKnowledgeSource{ - private OWLOntology ontology; - private Set<OWLOntology> ontologies; - private Set<OWLClass> classes; - private Set<OWLObjectProperty> prop; - private Set<OWLDataProperty> dataProp; - private Set<OWLNamedIndividual> individuals; + private byte[] ontologyBytes; + private OntologyToByteConverter converter = new SimpleOntologyToByteConverter(); + - public OWLAPIOntology() { - this(null); - } + public OWLAPIOntology(OWLOntology onto) { + ontologyBytes = converter.convert(onto); + } - public OWLAPIOntology(OWLOntology onto) - { - this.ontology = onto; - classes = ontology.getClassesInSignature(); - prop = ontology.getObjectPropertiesInSignature(); - dataProp = ontology.getDataPropertiesInSignature(); - individuals = ontology.getIndividualsInSignature(); - } - public static String getName() { return "OWL API Ontology"; } - - @Override + + @Override + public OWLOntology createOWLOntology(OWLOntologyManager manager) { + return converter.convert(ontologyBytes, manager); + } + + @Override public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { } - public OWLOntology getOWLOntolgy() - { - return ontology; - } - @Override public KB toKB() { @@ -95,45 +83,22 @@ { return null; } - - public void setOWLOntologies(Set<OWLOntology> onto) { - ontologies = onto; - System.out.println("ONTO: " + ontologies); - Iterator<OWLOntology> it = ontologies.iterator(); - while(it.hasNext()) { - OWLOntology ont = it.next(); - if(ont.getClassesInSignature() != null) { - classes.addAll(ont.getClassesInSignature()); - } - if(ont.getObjectPropertiesInSignature() != null) { - prop.addAll(ont.getObjectPropertiesInSignature()); - } - if(ont.getDataPropertiesInSignature() != null) { - dataProp.addAll(ont.getDataPropertiesInSignature()); - } - if(ont.getIndividualsInSignature() != null) { - individuals.addAll(ont.getIndividualsInSignature()); - } - } - } - - public Set<OWLOntology> getOWLOnntologies() { - return ontologies; - } - - public Set<OWLClass> getOWLClasses() { - return classes; - } - - public Set<OWLObjectProperty> getOWLObjectProperies() { - return prop; - } - - public Set<OWLDataProperty> getOWLDataProperies() { - return dataProp; - } - - public Set<OWLNamedIndividual> getOWLIndividuals() { - return individuals; - } + + /** + * Get the OntologyToByteConverter associated with this object. + * + * @return The OntologyToByteConverter associated with this object. + */ + public OntologyToByteConverter getConverter() { + return converter; + } + + /** + * Set the OntologyToByteConverter associated with this object. + * + * @param converter the OntologyToByteConverter to associate with this object. 
+ */ + public void setConverter(OntologyToByteConverter converter) { + this.converter = converter; + } } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-14 17:27:16 UTC (rev 3610) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -161,11 +161,7 @@ if (source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof SparqlSimpleExtractor || source instanceof OWLAPIOntology) { - if (source instanceof OWLAPIOntology) { - ontology = ((OWLAPIOntology) source).getOWLOntolgy(); - manager = ontology.getOWLOntologyManager(); - owlAPIOntologies.add(ontology); - } else if (source instanceof SparqlKnowledgeSource) { + if (source instanceof SparqlKnowledgeSource) { ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); manager = ontology.getOWLOntologyManager(); owlAPIOntologies.add(ontology); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2012-03-14 17:27:16 UTC (rev 3610) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -22,7 +22,6 @@ import java.io.File; import java.net.URI; import java.net.URISyntaxException; -import java.net.URL; import java.util.Collection; import java.util.Collections; import java.util.Comparator; @@ -78,6 +77,7 @@ import org.dllearner.core.owl.UntypedConstant; import org.dllearner.kb.OWLAPIOntology; import org.dllearner.kb.OWLFile; +import org.dllearner.kb.OWLOntologyKnowledgeSource; import org.dllearner.kb.sparql.SparqlKnowledgeSource; import org.dllearner.utilities.Helper; import org.dllearner.utilities.owl.ConceptComparator; @@ -200,35 +200,30 @@ for (AbstractKnowledgeSource source : sources) { + if (source instanceof OWLOntologyKnowledgeSource) { + ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); + owlAPIOntologies.add(ontology); + } + if (source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - URL url = null; - if (source instanceof OWLFile) { - url = ((OWLFile) source).getURL(); - } -// try { + if (source instanceof SparqlKnowledgeSource) { + ontology = ((SparqlKnowledgeSource) source) + .getOWLAPIOntology(); + manager = ontology.getOWLOntologyManager(); + owlAPIOntologies.add(ontology); + } - if (source instanceof OWLAPIOntology) { - ontology = ((OWLAPIOntology) source).getOWLOntolgy(); - manager = ontology.getOWLOntologyManager(); - } else if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source) - .getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - } else { - ontology = manager.loadOntologyFromOntologyDocument(IRI.create(url)); - } - - owlAPIOntologies.add(ontology); - // imports includes the ontology itself - Set<OWLOntology> imports = manager - .getImportsClosure(ontology); - allImports.addAll(imports); - loadedOntologies.addAll(imports); - // System.out.println(imports); - classes.addAll(ontology.getClassesInSignature(true)); + // imports includes the ontology itself + Set<OWLOntology> imports = manager + .getImportsClosure(ontology); + allImports.addAll(imports); + 
loadedOntologies.addAll(imports); + + // System.out.println(imports); + classes.addAll(ontology.getClassesInSignature(true)); owlObjectProperties.addAll(ontology.getObjectPropertiesInSignature(true)); owlDatatypeProperties.addAll(ontology.getDataPropertiesInSignature(true)); owlIndividuals.addAll(ontology.getIndividualsInSignature(true)); @@ -474,42 +469,26 @@ for (AbstractKnowledgeSource source : sources) { + if (source instanceof OWLOntologyKnowledgeSource) { + ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); + owlAPIOntologies.add(ontology); + } if (source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - URL url = null; - if (source instanceof OWLFile) { - url = ((OWLFile) source).getURL(); - } -// try { + if (source instanceof SparqlKnowledgeSource) { + ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); + manager = ontology.getOWLOntologyManager(); + owlAPIOntologies.add(ontology); + } - if (source instanceof OWLAPIOntology) { - ontology = ((OWLAPIOntology) source).getOWLOntolgy(); - manager = ontology.getOWLOntologyManager(); - } else if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - } else { - try { - ontology = manager.loadOntologyFromOntologyDocument(IRI.create(url - .toURI())); - } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (URISyntaxException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - owlAPIOntologies.add(ontology); - // imports includes the ontology itself - Set<OWLOntology> imports = manager - .getImportsClosure(ontology); - allImports.addAll(imports); - loadedOntologies.addAll(imports); - // System.out.println(imports); + // imports includes the ontology itself + Set<OWLOntology> imports = manager + .getImportsClosure(ontology); + allImports.addAll(imports); + loadedOntologies.addAll(imports); + // System.out.println(imports); classes.addAll(ontology.getClassesInSignature(true)); owlObjectProperties.addAll(ontology.getObjectPropertiesInSignature(true)); owlDatatypeProperties.addAll(ontology.getDataPropertiesInSignature(true)); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2012-03-14 17:27:16 UTC (rev 3610) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -20,8 +20,6 @@ package org.dllearner.reasoning; import java.io.File; -import java.net.URISyntaxException; -import java.net.URL; import java.util.Collection; import java.util.Collections; import java.util.Comparator; @@ -73,6 +71,7 @@ import org.dllearner.core.owl.UntypedConstant; import org.dllearner.kb.OWLAPIOntology; import org.dllearner.kb.OWLFile; +import org.dllearner.kb.OWLOntologyKnowledgeSource; import org.dllearner.kb.sparql.SparqlKnowledgeSource; import org.dllearner.utilities.Helper; import org.dllearner.utilities.owl.ConceptComparator; @@ -250,36 +249,22 @@ for (AbstractKnowledgeSource source : sources) { + if (source instanceof OWLOntologyKnowledgeSource) { + ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); + owlAPIOntologies.add(ontology); + } + if (source instanceof OWLFile || source instanceof 
SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - URL url = null; - if (source instanceof OWLFile) { - url = ((OWLFile) source).getURL(); - } -// try { + if (source instanceof SparqlKnowledgeSource) { + ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); + manager = ontology.getOWLOntologyManager(); + owlAPIOntologies.add(ontology); + } - if (source instanceof OWLAPIOntology) { - ontology = ((OWLAPIOntology) source).getOWLOntolgy(); - } else if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - } else { - try { - ontology = manager.loadOntologyFromOntologyDocument(IRI.create(url - .toURI())); - } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (URISyntaxException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - owlAPIOntologies.add(ontology); - classes.addAll(ontology.getClassesInSignature(true)); + classes.addAll(ontology.getClassesInSignature(true)); owlObjectProperties.addAll(ontology.getObjectPropertiesInSignature(true)); owlDatatypeProperties.addAll(ontology.getDataPropertiesInSignature(true)); owlIndividuals.addAll(ontology.getIndividualsInSignature(true)); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2012-03-14 17:27:16 UTC (rev 3610) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -22,7 +22,6 @@ import java.io.File; import java.net.MalformedURLException; import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.util.Collection; import java.util.Collections; @@ -66,6 +65,7 @@ import org.dllearner.core.owl.fuzzydll.FuzzyIndividual; import org.dllearner.kb.OWLAPIOntology; import org.dllearner.kb.OWLFile; +import org.dllearner.kb.OWLOntologyKnowledgeSource; import org.dllearner.kb.sparql.SparqlKnowledgeSource; import org.dllearner.reasoning.ReasonerType; import org.dllearner.utilities.owl.ConceptComparator; @@ -74,7 +74,6 @@ import org.dllearner.utilities.owl.OWLAPIConverter; import org.dllearner.utilities.owl.OWLAPIDescriptionConvertVisitor; import org.dllearner.utilities.owl.RoleComparator; -import org.semanticweb.HermiT.Reasoner.ReasonerFactory; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.AddAxiom; import org.semanticweb.owlapi.model.IRI; @@ -246,53 +245,43 @@ prefixes = new TreeMap<String,String>(); for(AbstractKnowledgeSource source : sources) { - + + if (source instanceof OWLOntologyKnowledgeSource) { + ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); + owlAPIOntologies.add(ontology); + } + if(source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - URL url=null; - if(source instanceof OWLFile){ - url = ((OWLFile)source).getURL(); - } - try { - - if(source instanceof OWLAPIOntology) { - ontology = ((OWLAPIOntology)source).getOWLOntolgy(); - } else if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource)source).getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - } else { - ontology = manager.loadOntologyFromOntologyDocument(IRI.create(url.toURI())); - } 
- - owlAPIOntologies.add(ontology); - // imports includes the ontology itself - Set<OWLOntology> imports = manager.getImportsClosure(ontology); - allImports.addAll(imports); + if (source instanceof SparqlKnowledgeSource) { + ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); + manager = ontology.getOWLOntologyManager(); + owlAPIOntologies.add(ontology); + } + + // imports includes the ontology itself + Set<OWLOntology> imports = manager.getImportsClosure(ontology); + allImports.addAll(imports); // System.out.println(imports); - for(OWLOntology ont : imports) { - classes.addAll(ont.getClassesInSignature()); - owlObjectProperties.addAll(ont.getObjectPropertiesInSignature()); - owlDatatypeProperties.addAll(ont.getDataPropertiesInSignature()); - owlIndividuals.addAll(ont.getIndividualsInSignature()); - } - - // if several knowledge sources are included, then we can only - // guarantee that the base URI is from one of those sources (there - // can't be more than one); but we will take care that all prefixes are - // correctly imported - OWLOntologyFormat format = manager.getOntologyFormat(ontology); - if(format instanceof PrefixOWLOntologyFormat) { - prefixes.putAll(((PrefixOWLOntologyFormat)format).getPrefixName2PrefixMap()); - baseURI = ((PrefixOWLOntologyFormat) format).getDefaultPrefix(); - prefixes.remove(""); - } - - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } - // all other sources are converted to KB and then to an + for (OWLOntology ont : imports) { + classes.addAll(ont.getClassesInSignature()); + owlObjectProperties.addAll(ont.getObjectPropertiesInSignature()); + owlDatatypeProperties.addAll(ont.getDataPropertiesInSignature()); + owlIndividuals.addAll(ont.getIndividualsInSignature()); + } + + // if several knowledge sources are included, then we can only + // guarantee that the base URI is from one of those sources (there + // can't be more than one); but we will take care that all prefixes are + // correctly imported + OWLOntologyFormat format = manager.getOntologyFormat(ontology); + if (format instanceof PrefixOWLOntologyFormat) { + prefixes.putAll(((PrefixOWLOntologyFormat) format).getPrefixName2PrefixMap()); + baseURI = ((PrefixOWLOntologyFormat) format).getDefaultPrefix(); + prefixes.remove(""); + } + + // all other sources are converted to KB and then to an // OWL API ontology } else { KB kb = source.toKB(); Added: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyToByteConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyToByteConverter.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyToByteConverter.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -0,0 +1,37 @@ +package org.dllearner.utilities.owl; + +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyManager; + +/** + * Created by IntelliJ IDEA. + * User: Chris Shellenbarger + * Date: 3/14/12 + * Time: 7:30 PM + * <p/> + * Interface to allow the conversion of an OWL Ontology into a byte array and back. + * <p/> + * The purpose of the interface is to allow the association of an OWLOntology object with a specified OWLOntologyManager. + * <p/> + * If someone hands us an OWLOntology, we may not want to use the associated OWLOntologyManager. 
Rather, we can serialize it out + * to a byte array and then read it back in with a different OWLOntologyManager. + */ +public interface OntologyToByteConverter { + + /** + * Convert the ontology into a byte array. + * + * @param ontology The ontology to convert to a byte array. + * @return The byte array representing the ontology + */ + byte[] convert(OWLOntology ontology); + + /** + * Convert bytes into an Ontology registered with manager. + * + * @param bytes The bytes to convert to an OWLOntology + * @param manager The Ontology Manager to load the ontology with. + * @return The ontology derived from bytes. + */ + OWLOntology convert(byte[] bytes, OWLOntologyManager manager); +} Added: trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -0,0 +1,55 @@ +package org.dllearner.utilities.owl; + +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +/** + * Created by IntelliJ IDEA. + * User: Chris Shellenbarger + * Date: 3/13/12 + * Time: 6:24 PM + * + * A Byte Array based implementation of the OntologyToByteConverter interface. + */ +public class SimpleOntologyToByteConverter implements OntologyToByteConverter { + + @Override + public byte[] convert(OWLOntology ontology) { + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + + OWLOntologyManager manager = ontology.getOWLOntologyManager(); + try { + manager.saveOntology(ontology,baos); + baos.close(); + } catch (OWLOntologyStorageException e) { + throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + + return baos.toByteArray(); + } + + @Override + public OWLOntology convert(byte[] bytes, OWLOntologyManager manager) { + + ByteArrayInputStream bais = new ByteArrayInputStream(bytes); + + try { + OWLOntology ontology = manager.loadOntologyFromOntologyDocument(bais); + bais.close(); + return ontology; + } catch (OWLOntologyCreationException e) { + throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} Added: trunk/components-core/src/test/java/org/dllearner/kb/OWLAPIOntologyTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/kb/OWLAPIOntologyTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/kb/OWLAPIOntologyTest.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -0,0 +1,47 @@ +package org.dllearner.kb; + +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; + +import java.io.IOException; + +import static org.junit.Assert.*; + +/** + * Created by 
IntelliJ IDEA. + * User: Chris Shellenbarger + * Date: 3/14/12 + * Time: 7:57 PM + */ +public class OWLAPIOntologyTest { + + private OWLOntology createOntology() throws OWLOntologyCreationException, IOException { + // Set up the ontology here and hide its manager - the test needs to use a different ontology manager on reconstitution + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(new OWLDataFactoryImpl()); + Resource owlFile = new ClassPathResource("/org/dllearner/kb/owl-api-ontology-data.owl"); + return manager.loadOntologyFromOntologyDocument(owlFile.getInputStream()); + } + + + @Test + public void testMethods() throws Exception { + OWLOntology ontology = createOntology(); + assertNotNull(ontology); + + OWLAPIOntology testSubject = new OWLAPIOntology(ontology); + + OWLOntology result = testSubject.createOWLOntology(OWLManager.createOWLOntologyManager(new OWLDataFactoryImpl())); + + assertNotNull(result); + assertNotSame(ontology,result); + + // Basic Equality Check - for some reason axiom count is different - the result looks more complete than the original. + assertEquals(ontology.getIndividualsInSignature().size(), result.getIndividualsInSignature().size()); + } +} Added: trunk/components-core/src/test/java/org/dllearner/utilities/owl/OntologyToByteConverterTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/utilities/owl/OntologyToByteConverterTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/utilities/owl/OntologyToByteConverterTest.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -0,0 +1,56 @@ +package org.dllearner.utilities.owl; + +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; + +import java.io.IOException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** + * Created by IntelliJ IDEA. + * User: Chris Shellenbarger + * Date: 3/14/12 + * Time: 7:29 PM + * + * Test the interface level apis for the ontology to byte converter. 
+ */ +public abstract class OntologyToByteConverterTest { + + public abstract OntologyToByteConverter getInstance(); + + private OWLOntology createOntology() throws OWLOntologyCreationException, IOException { + // Set up the ontology here and hide its manager - the test needs to use a different ontology manager on reconstitution + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(new OWLDataFactoryImpl()); + Resource owlFile = new ClassPathResource("/org/dllearner/utilities/owl/byte-conversion-data.owl"); + return manager.loadOntologyFromOntologyDocument(owlFile.getInputStream()); + } + + @Test + public void testConversion() throws Exception { + OntologyToByteConverter converter = getInstance(); + OWLOntology ontology = createOntology(); + assertNotNull(ontology); + + byte[] bytes = converter.convert(ontology); + + assertNotNull(bytes); + assertTrue(bytes.length > 0); + + // Use a new manager so that the IRIs don't get messed up + OWLOntologyManager newManager = OWLManager.createOWLOntologyManager(new OWLDataFactoryImpl()); + OWLOntology result = converter.convert(bytes, newManager); + assertNotNull(result); + + // Basic Equality Check - for some reason axiom count is different - the result looks more complete than the original. + assertEquals(ontology.getIndividualsInSignature().size(), result.getIndividualsInSignature().size()); + } +} Added: trunk/components-core/src/test/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverterTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverterTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverterTest.java 2012-03-15 02:34:07 UTC (rev 3611) @@ -0,0 +1,17 @@ +package org.dllearner.utilities.owl; + +/** + * Created by IntelliJ IDEA. + * User: Chris Shellenbarger + * Date: 3/13/12 + * Time: 6:28 PM + * + * Test instance for a particular implementation. 
+ */ +public class SimpleOntologyToByteConverterTest extends OntologyToByteConverterTest{ + + @Override + public OntologyToByteConverter getInstance() { + return new SimpleOntologyToByteConverter(); + } +} Added: trunk/components-core/src/test/resources/org/dllearner/kb/owl-api-ontology-data.owl =================================================================== --- trunk/components-core/src/test/resources/org/dllearner/kb/owl-api-ontology-data.owl (rev 0) +++ trunk/components-core/src/test/resources/org/dllearner/kb/owl-api-ontology-data.owl 2012-03-15 02:34:07 UTC (rev 3611) @@ -0,0 +1,33 @@ +<?xml version="1.0"?> +<rdf:RDF + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns="http://example.com/father#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xml:base="http://example.com/father"> + <owl:Ontology rdf:about=""/> + <owl:Class rdf:ID="female"/> + <owl:Class rdf:ID="male"> + <owl:equivalentClass> + <owl:Class> + <owl:complementOf rdf:resource="#female"/> + </owl:Class> + </owl:equivalentClass> + </owl:Class> + <owl:ObjectProperty rdf:ID="hasChild"/> + <male rdf:ID="markus"> + <hasChild> + <female rdf:ID="anna"> + <hasChild> + <male rdf:ID="heinz"/> + </hasChild> + </female> + </hasChild> + </male> + <male rdf:ID="stefan"> + <hasChild rdf:resource="#markus"/> + </male> + <female rdf:ID="michelle"/> + <male rdf:ID="martin"> + <hasChild rdf:resource="#heinz"/> + </male> +</rdf:RDF> Added: trunk/components-core/src/test/resources/org/dllearner/utilities/owl/byte-conversion-data.owl =================================================================== --- trunk/components-core/src/test/resources/org/dllearner/utilities/owl/byte-conversion-data.owl (rev 0) +++ trunk/components-core/src/test/resources/org/dllearner/utilities/owl/byte-conversion-data.owl 2012-03-15 02:34:07 UTC (rev 3611) @@ -0,0 +1,35 @@ +<?xml version="1.0"?> +<rdf:RDF + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns="http://example.com/father#" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xml:base="http://example.com/father"> + <owl:Ontology rdf:about=""/> + <owl:Class rdf:ID="female"/> + <owl:Class rdf:ID="male"> + <owl:equivalentClass> + <owl:Class> + <owl:complementOf rdf:resource="#female"/> + </owl:Class> + </owl:equivalentClass> + </owl:Class> + <owl:ObjectProperty rdf:ID="hasChild"/> + <male rdf:ID="markus"> + <hasChild> + <female rdf:ID="anna"> + <hasChild> + <male rdf:ID="heinz"/> + </hasChild> + </female> + </hasChild> + </male> + <male rdf:ID="stefan"> + <hasChild rdf:resource="#markus"/> + </male> + <female rdf:ID="michelle"/> + <male rdf:ID="martin"> + <hasChild rdf:resource="#heinz"/> + </male> +</rdf:RDF> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
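The effect of this revision is easiest to see in a small round trip: an ontology is serialised to bytes and rebuilt under a different OWLOntologyManager, which is exactly what OWLAPIOntology now does internally when a reasoner calls createOWLOntology() with its own manager. The sketch below uses only the classes added or referenced in the diff above; the ontology file name is a placeholder.

import java.io.File;

import org.dllearner.kb.OWLAPIOntology;
import org.dllearner.utilities.owl.OntologyToByteConverter;
import org.dllearner.utilities.owl.SimpleOntologyToByteConverter;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class OntologyRoundTripSketch {

    public static void main(String[] args) throws Exception {
        // load an ontology with one manager (the file name is a placeholder)
        OWLOntologyManager originalManager = OWLManager.createOWLOntologyManager();
        OWLOntology original = originalManager.loadOntologyFromOntologyDocument(new File("father.owl"));

        // serialise to bytes and reconstitute under a *different* manager,
        // which is what decouples the stored ontology from the caller's manager
        OntologyToByteConverter converter = new SimpleOntologyToByteConverter();
        byte[] bytes = converter.convert(original);
        OWLOntology copy = converter.convert(bytes, OWLManager.createOWLOntologyManager());
        System.out.println("copied axiom count: " + copy.getAxiomCount());

        // the same mechanism backs the knowledge source: a reasoner hands in
        // its own manager and receives a private copy of the ontology
        OWLAPIOntology source = new OWLAPIOntology(original);
        OWLOntology reasonerView = source.createOWLOntology(OWLManager.createOWLOntologyManager());
        System.out.println("individuals visible to the reasoner: "
                + reasonerView.getIndividualsInSignature().size());
    }
}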
From: <sha...@us...> - 2012-03-15 03:17:19
Revision: 3612 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3612&view=rev Author: shadowtm Date: 2012-03-15 03:17:12 +0000 (Thu, 15 Mar 2012) Log Message: ----------- Updated SparqlKnowledgeSource and SparqlSimpleExtractor to implement the OWLOntologyKnowledgeSource interface in clean up the code within the reasoners. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java Added Paths: ----------- trunk/components-core/src/test/java/org/dllearner/kb/sparql/ trunk/components-core/src/test/java/org/dllearner/kb/sparql/SparqlKnowledgeSourceTest.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2012-03-15 02:34:07 UTC (rev 3611) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2012-03-15 03:17:12 UTC (rev 3612) @@ -47,6 +47,7 @@ import org.dllearner.core.options.StringTupleListConfigOption; import org.dllearner.core.options.URLConfigOption; import org.dllearner.core.owl.KB; +import org.dllearner.kb.OWLOntologyKnowledgeSource; import org.dllearner.kb.aquisitors.SparqlTupleAquisitor; import org.dllearner.kb.aquisitors.SparqlTupleAquisitorImproved; import org.dllearner.kb.aquisitors.TupleAquisitor; @@ -60,11 +61,14 @@ import org.dllearner.utilities.Files; import org.dllearner.utilities.JamonMonitorLogger; import org.dllearner.utilities.datastructures.StringTuple; +import org.dllearner.utilities.owl.OntologyToByteConverter; +import org.dllearner.utilities.owl.SimpleOntologyToByteConverter; import org.dllearner.utilities.statistics.SimpleClock; import org.semanticweb.owlapi.model.OWLOntology; import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; +import org.semanticweb.owlapi.model.OWLOntologyManager; /** * Represents the SPARQL Endpoint Component. @@ -74,12 +78,14 @@ * @author Sebastian Hellmann */ @ComponentAnn(name = "SPARQL endpoint fragment", shortName = "sparqlfrag", version = 0.5) -public class SparqlKnowledgeSource extends AbstractKnowledgeSource { +public class SparqlKnowledgeSource extends AbstractKnowledgeSource implements OWLOntologyKnowledgeSource{ private ProgressMonitor mon; private static final boolean debugExitAfterExtraction = false; // switches + private byte[] ontologyBytes; + private OntologyToByteConverter converter = new SimpleOntologyToByteConverter(); // private SparqlKnowledgeSourceConfigurator configurator; @@ -107,7 +113,6 @@ private URL ontologyFragmentURL; - private OWLOntology fragment; private Manipulator manipulator = null; @@ -337,9 +342,10 @@ }*/ extractionTime.stop(); - - fragment = m.getOWLAPIOntologyForNodes(seedNodes, saveExtractedFragment); - + // Do this so that we can support the OWLOntologyKnowledgeSource + // and can be thread safe. + OWLOntology fragment = m.getOWLAPIOntologyForNodes(seedNodes, saveExtractedFragment); + ontologyBytes = getConverter().convert(fragment); logger.info("Finished collecting fragment. 
needed "+extractionTime.getLastValue()+" ms"); @@ -362,8 +368,13 @@ System.exit(0); } } - - public List<Node> extractParallel(){ + + @Override + public OWLOntology createOWLOntology(OWLOntologyManager manager) { + return getConverter().convert(ontologyBytes, manager); + } + + public List<Node> extractParallel(){ return null; } @@ -537,10 +548,6 @@ return ontologyFragmentURL; } - public OWLOntology getOWLAPIOntology() { - return fragment; - } - public boolean isUseCache() { return useCache; } @@ -733,6 +740,39 @@ this.cacheDir = cacheDir; } - + /** + * Get the OntologyToByteConverter associated with this object. + * + * @return The OntologyToByteConverter associated with this object. + */ + public OntologyToByteConverter getConverter() { + return converter; + } + /** + * Set the OntologyToByteConverter associated with this object. + * + * @param converter the OntologyToByteConverter to associate with this object. + */ + public void setConverter(OntologyToByteConverter converter) { + this.converter = converter; + } + + /** + * Accessor for getting the Ontology Bytes + * + * @return Get the underlying ontology bytes. + */ + byte[] getOntologyBytes() { + return ontologyBytes; + } + + /** + * Set the ontology bytes. + * + * @param ontologyBytes The byte array representation of the fragment. + */ + void setOntologyBytes(byte[] ontologyBytes) { + this.ontologyBytes = ontologyBytes; + } } Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java 2012-03-15 02:34:07 UTC (rev 3611) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java 2012-03-15 03:17:12 UTC (rev 3612) @@ -6,9 +6,12 @@ import org.dllearner.core.ComponentInitException; import org.dllearner.core.KnowledgeSource; import org.dllearner.core.config.ConfigOption; +import org.dllearner.kb.OWLOntologyKnowledgeSource; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyManager; @ComponentAnn(name="efficient SPARQL fragment extractor", shortName="sparqls", version=0.1) -public class SparqlSimpleExtractor implements KnowledgeSource { +public class SparqlSimpleExtractor implements KnowledgeSource, OWLOntologyKnowledgeSource { @ConfigOption(name="endpointURL", description="URL of the SPARQL endpoint", required=true) private URL endpointURL = null; @@ -38,5 +41,10 @@ public void setEndpointURL(URL endpointURL) { this.endpointURL = endpointURL; } - + + @Override + public OWLOntology createOWLOntology(OWLOntologyManager manager) { + //TODO Update this to return an ontology representation of what the reasoners should work with. Build with the passed in manager instance. 
+ return null; + } } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-15 03:17:12 UTC (rev 3612) @@ -161,14 +161,6 @@ if (source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof SparqlSimpleExtractor || source instanceof OWLAPIOntology) { - if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - owlAPIOntologies.add(ontology); - } else if(source instanceof SparqlSimpleExtractor) { - // TODO - } - directImports.addAll(ontology.getImportsDeclarations()); try { // imports includes the ontology itself Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2012-03-15 03:17:12 UTC (rev 3612) @@ -209,13 +209,6 @@ || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source) - .getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - owlAPIOntologies.add(ontology); - } - // imports includes the ontology itself Set<OWLOntology> imports = manager .getImportsClosure(ontology); @@ -477,12 +470,6 @@ || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - owlAPIOntologies.add(ontology); - } - // imports includes the ontology itself Set<OWLOntology> imports = manager .getImportsClosure(ontology); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2012-03-15 03:17:12 UTC (rev 3612) @@ -258,12 +258,6 @@ || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - owlAPIOntologies.add(ontology); - } - classes.addAll(ontology.getClassesInSignature(true)); owlObjectProperties.addAll(ontology.getObjectPropertiesInSignature(true)); owlDatatypeProperties.addAll(ontology.getDataPropertiesInSignature(true)); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2012-03-15 02:34:07 UTC (rev 3611) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2012-03-15 03:17:12 UTC (rev 3612) @@ -253,12 +253,6 @@ 
if(source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { - if (source instanceof SparqlKnowledgeSource) { - ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); - manager = ontology.getOWLOntologyManager(); - owlAPIOntologies.add(ontology); - } - // imports includes the ontology itself Set<OWLOntology> imports = manager.getImportsClosure(ontology); allImports.addAll(imports); Added: trunk/components-core/src/test/java/org/dllearner/kb/sparql/SparqlKnowledgeSourceTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/kb/sparql/SparqlKnowledgeSourceTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/kb/sparql/SparqlKnowledgeSourceTest.java 2012-03-15 03:17:12 UTC (rev 3612) @@ -0,0 +1,53 @@ +package org.dllearner.kb.sparql; + +import org.dllearner.utilities.owl.SimpleOntologyToByteConverter; +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; + +import java.io.IOException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNotSame; + +/** + * Created by IntelliJ IDEA. + * User: Chris Shellenbarger + * Date: 3/14/12 + * Time: 9:02 PM + * + * Basic test to test some components of the SparqlKnowledgeSource + */ +public class SparqlKnowledgeSourceTest { + + private OWLOntology createOntology() throws OWLOntologyCreationException, IOException { + // Set up the ontology here and hide its manager - the test needs to use a different ontology manager on reconstitution + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(new OWLDataFactoryImpl()); + Resource owlFile = new ClassPathResource("/org/dllearner/kb/owl-api-ontology-data.owl"); + return manager.loadOntologyFromOntologyDocument(owlFile.getInputStream()); + } + + + @Test + public void testMethods() throws Exception { + OWLOntology ontology = createOntology(); + assertNotNull(ontology); + + SparqlKnowledgeSource testSubject = new SparqlKnowledgeSource(); + testSubject.setOntologyBytes(new SimpleOntologyToByteConverter().convert(ontology)); + + OWLOntology result = testSubject.createOWLOntology(OWLManager.createOWLOntologyManager(new OWLDataFactoryImpl())); + + assertNotNull(result); + assertNotSame(ontology,result); + + // Basic Equality Check - for some reason axiom count is different - the result looks more complete than the original. + assertEquals(ontology.getIndividualsInSignature().size(), result.getIndividualsInSignature().size()); + } +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
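The change above stops caching a live OWLOntology inside SparqlKnowledgeSource and instead stores the extracted fragment as a byte array, so every consumer can rebuild the ontology with its own OWLOntologyManager. A minimal consumer-side sketch of the new OWLOntologyKnowledgeSource contract follows; the set of knowledge sources and the per-consumer manager are illustrative assumptions, not code from this revision.

import java.util.HashSet;
import java.util.Set;

import org.dllearner.core.KnowledgeSource;
import org.dllearner.kb.OWLOntologyKnowledgeSource;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class OntologySourceConsumerSketch {

    // 'sources' stands in for the knowledge sources handed to a reasoner component
    public Set<OWLOntology> collectOntologies(Set<KnowledgeSource> sources) {
        // one manager per consumer keeps the reconstituted ontologies independent and thread safe
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        Set<OWLOntology> ontologies = new HashSet<OWLOntology>();
        for (KnowledgeSource source : sources) {
            if (source instanceof OWLOntologyKnowledgeSource) {
                // SparqlKnowledgeSource deserialises its stored byte[] fragment into this manager
                ontologies.add(((OWLOntologyKnowledgeSource) source).createOWLOntology(manager));
            }
        }
        return ontologies;
    }
}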
From: <sha...@us...> - 2012-04-18 03:45:42
|
Revision: 3639 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3639&view=rev Author: shadowtm Date: 2012-04-18 03:45:36 +0000 (Wed, 18 Apr 2012) Log Message: ----------- Added convenience method to AnnComponentManager Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java Added Paths: ----------- trunk/components-core/src/test/java/org/dllearner/core/ trunk/components-core/src/test/java/org/dllearner/core/AnnComponentManagerTest.java Modified: trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-04-18 02:26:42 UTC (rev 3638) +++ trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-04-18 03:45:36 UTC (rev 3639) @@ -136,7 +136,25 @@ public Collection<Class<? extends Component>> getComponents() { return components; } - + + /** + * Get registered components which are of the specified type. + * + * @param type The super type. + * @return All sub classes of type. + */ + public Collection<Class<? extends Component>> getComponentsOfType(Class type) { + + Collection<Class<? extends Component>> result = new ArrayList<Class<? extends Component>>(); + for (Class<? extends Component> component : components) { + if (type.isAssignableFrom(component)) { + result.add(component); + } + } + + return result; + } + /** * Convenience method, which returns a list of components along with * their name. Added: trunk/components-core/src/test/java/org/dllearner/core/AnnComponentManagerTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/core/AnnComponentManagerTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/core/AnnComponentManagerTest.java 2012-04-18 03:45:36 UTC (rev 3639) @@ -0,0 +1,25 @@ +package org.dllearner.core; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.Collection; + +/** + * Created with IntelliJ IDEA. + * User: Chris + * Date: 4/17/12 + * Time: 9:16 PM + * + * Tests for the AnnComponentManager + */ +public class AnnComponentManagerTest { + + + @Test + public void testGetComponentsOfType() { + + Collection<Class<? extends Component>> components = AnnComponentManager.getInstance().getComponentsOfType(ReasonerComponent.class); + Assert.assertEquals(3,components.size()); + } +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
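The new getComponentsOfType method filters the registered component classes with Class.isAssignableFrom, so it returns the given type itself and every registered subtype. A short usage sketch, mirroring the assertion in AnnComponentManagerTest (the printed class names are only illustrative):

import java.util.Collection;

import org.dllearner.core.AnnComponentManager;
import org.dllearner.core.Component;
import org.dllearner.core.ReasonerComponent;

public class ComponentLookupSketch {
    public static void main(String[] args) {
        // every registered component class that is a ReasonerComponent (or a subtype of it)
        Collection<Class<? extends Component>> reasoners =
                AnnComponentManager.getInstance().getComponentsOfType(ReasonerComponent.class);
        for (Class<? extends Component> reasonerClass : reasoners) {
            System.out.println(reasonerClass.getName());
        }
    }
}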
From: <lor...@us...> - 2013-04-24 12:12:55
|
Revision: 3926 http://sourceforge.net/p/dl-learner/code/3926 Author: lorenz_b Date: 2013-04-24 12:12:51 +0000 (Wed, 24 Apr 2013) Log Message: ----------- Modified pattern analyzing class. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-04-21 11:20:26 UTC (rev 3925) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-04-24 12:12:51 UTC (rev 3926) @@ -11,19 +11,20 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.LinkedList; +import java.util.List; import java.util.Queue; import java.util.prefs.Preferences; -import org.coode.owlapi.functionalrenderer.OWLFunctionalSyntaxRenderer; import org.dllearner.kb.dataset.OWLOntologyDataset; import org.dllearner.kb.repository.OntologyRepository; import org.dllearner.kb.repository.OntologyRepositoryEntry; import org.ini4j.IniPreferences; import org.ini4j.InvalidFileFormatException; import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.io.AbstractOWLRenderer; import org.semanticweb.owlapi.io.OWLObjectRenderer; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAxiom; @@ -31,16 +32,12 @@ import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; import org.semanticweb.owlapi.model.OWLLogicalAxiom; import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyChange; +import org.semanticweb.owlapi.model.OWLOntologyAlreadyExistsException; import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyID; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.reasoner.OWLReasoner; -import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxOWLObjectRendererImpl; -import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; import com.google.common.collect.HashMultiset; import com.google.common.collect.Multiset; import com.google.common.collect.Multisets; @@ -78,6 +75,8 @@ private PreparedStatement insertOntologyPatternPs; private OWLObjectRenderer axiomRenderer = new ManchesterOWLSyntaxOWLObjectRendererImpl(); + + private boolean randomOrder = true; public OWLAxiomPatternFinder(OWLOntologyDataset dataset) { @@ -147,8 +146,8 @@ statement.execute("CREATE TABLE IF NOT EXISTS Pattern (" + "id MEDIUMINT NOT NULL AUTO_INCREMENT," - + "pattern VARCHAR(2000) NOT NULL," - + "pattern_pretty VARCHAR(2000) NOT NULL," + + "pattern VARCHAR(20000) NOT NULL," + + "pattern_pretty VARCHAR(20000) NOT NULL," + "PRIMARY KEY(id)," + "INDEX(pattern)) DEFAULT CHARSET=utf8"); @@ -164,8 +163,8 @@ + "ontology_id MEDIUMINT NOT NULL," + "pattern_id MEDIUMINT NOT NULL," + "occurrences INTEGER(8) NOT NULL," - + "FOREIGN KEY (ontology_id) REFERENCES Ontology(id)," - + "FOREIGN KEY 
(pattern_id) REFERENCES Pattern(id)," + + "FOREIGN KEY (ontology_id) REFERENCES Ontology(id) ON DELETE CASCADE," + + "FOREIGN KEY (pattern_id) REFERENCES Pattern(id) ON DELETE CASCADE," + "PRIMARY KEY(ontology_id, pattern_id)) DEFAULT CHARSET=utf8"); } catch (SQLException e) { e.printStackTrace(); @@ -191,6 +190,7 @@ insertPatternIdPs.setString(2, axiomRenderer.render(axiom)); insertPatternIdPs.execute(); } catch (SQLException e) { + System.out.println(axiomString.length()); e.printStackTrace(); } //get the auto generated ID @@ -218,6 +218,19 @@ return false; } + private void addOntologyError(URI physicalURI, Exception ex){ + String url = physicalURI.toString(); + //add ontology loading/parsing/... error entry + try { + insertOntologyPs.setString(1, url); + insertOntologyPs.setString(2, "ERROR:" + ex.getClass().getSimpleName() + "->" + ex.getMessage()); + insertOntologyPs.setString(3, repository.getName()); + insertOntologyPs.execute(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + private int addOntology(URI physicalURI, OWLOntology ontology){ String url = physicalURI.toString(); String ontologyIRI = ontology.getOntologyID().getOntologyIRI().toString(); @@ -264,7 +277,7 @@ insertOntologyPatternPs.setInt(3, occurrences); insertOntologyPatternPs.execute(); } catch (SQLException e) { - e.printStackTrace(); + System.err.println("Adding pattern\n" + pattern + "\nfailed." + e.getMessage()); } } } @@ -272,6 +285,11 @@ public void start() { OWLAxiomRenamer renamer = new OWLAxiomRenamer(dataFactory); Collection<OntologyRepositoryEntry> entries = repository.getEntries(); + if(randomOrder){ + List<OntologyRepositoryEntry> entryList = new ArrayList<OntologyRepositoryEntry>(repository.getEntries()); + Collections.shuffle(entryList); + entries = entryList; + } Multiset<OWLAxiom> allAxiomPatterns = HashMultiset.create(); for (OntologyRepositoryEntry entry : entries) { URI uri = entry.getPhysicalURI(); @@ -290,8 +308,10 @@ // System.out.println(owlAxiom + ": " + allAxiomPatterns.count(owlAxiom)); } manager.removeOntology(ontology); - } catch (OWLOntologyCreationException e) { + } catch (OWLOntologyAlreadyExistsException e) { e.printStackTrace(); + } catch (Exception e){ + addOntologyError(uri, e); } } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-04-21 11:20:26 UTC (rev 3925) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-04-24 12:12:51 UTC (rev 3926) @@ -90,7 +90,7 @@ } public Query asQuery(String rootVariable, OWLClassExpression expr){ - String queryString = "SELECT " + rootVariable + " WHERE {"; + String queryString = "SELECT DISTINCT " + rootVariable + " WHERE {"; queryString += convert(rootVariable, expr); queryString += "}"; return QueryFactory.create(queryString, Syntax.syntaxARQ); @@ -149,7 +149,7 @@ private String triple(String subject, String predicate, OWLLiteral object){ return (subject.startsWith("?") ? subject : "<" + subject + ">") + " " + (predicate.startsWith("?") || predicate.equals("a") ? 
predicate : "<" + predicate + ">") + " " + - "\"" + object + "\"^^<" + object.getDatatype().toStringID() + ">.\n"; + render(object) + ".\n"; } private String triple(String subject, String predicate, OWLIndividual object){ @@ -157,6 +157,10 @@ (predicate.startsWith("?") || predicate.equals("a") ? predicate : "<" + predicate + ">") + " " + "<" + object.toStringID() + ">.\n"; } + + private String render(OWLLiteral literal){ + return "\"" + literal + "\"^^<" + literal.getDatatype().toStringID() + ">"; + } @Override public void visit(OWLObjectProperty property) { @@ -480,6 +484,20 @@ @Override public void visit(OWLDataOneOf node) { + String subject = variables.peek(); + if(modalDepth() == 1){ + sparql += triple(subject, "?p", "?o"); + } + sparql += "FILTER(" + subject + " IN ("; + String values = ""; + for (OWLLiteral value : node.getValues()) { + if(!values.isEmpty()){ + values += ","; + } + values += render(value); + } + sparql += values; + sparql += "))"; } @Override @@ -620,6 +638,10 @@ query = converter.asQuery(rootVar, expr).toString(); System.out.println(expr + "\n" + query); + expr = df.getOWLDataAllValuesFrom(dpT,df.getOWLDataOneOf(lit)); + query = converter.asQuery(rootVar, expr).toString(); + System.out.println(expr + "\n" + query); + } Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java 2013-04-21 11:20:26 UTC (rev 3925) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java 2013-04-24 12:12:51 UTC (rev 3926) @@ -1,6 +1,7 @@ package org.dllearner.algorithms.pattern; import org.dllearner.kb.repository.OntologyRepository; +import org.dllearner.kb.repository.bioportal.BioPortalRepository; import org.dllearner.kb.repository.tones.TONESRepository; import org.junit.Before; import org.junit.Test; @@ -23,5 +24,13 @@ OWLAxiomPatternFinder patternFinder = new OWLAxiomPatternFinder(repository); patternFinder.start(); } + + @Test + public void testBioPortalRepository(){ + OntologyRepository repository = new BioPortalRepository(); + repository.initialize(); + OWLAxiomPatternFinder patternFinder = new OWLAxiomPatternFinder(repository); + patternFinder.start(); + } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-05-03 07:31:42
|
Revision: 3936 http://sourceforge.net/p/dl-learner/code/3936 Author: lorenz_b Date: 2013-05-03 07:31:37 +0000 (Fri, 03 May 2013) Log Message: ----------- Commented tests. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-05-02 10:57:42 UTC (rev 3935) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-05-03 07:31:37 UTC (rev 3936) @@ -93,6 +93,19 @@ dataFactory = manager.getOWLDataFactory(); initDBConnection(); + prepare(); + } + + public OWLAxiomPatternFinder(OntologyRepository repository, Connection conn) { + this.repository = repository; + this.conn = conn; + manager = OWLManager.createOWLOntologyManager(); + dataFactory = manager.getOWLDataFactory(); + + prepare(); + } + + private void prepare(){ createTables(); try { selectOntologyIdPs = conn.prepareStatement("SELECT id FROM Ontology WHERE url=?"); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2013-05-02 10:57:42 UTC (rev 3935) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2013-05-03 07:31:37 UTC (rev 3936) @@ -45,6 +45,7 @@ import org.dllearner.core.KnowledgeSource; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.owl.Axiom; +import org.dllearner.core.owl.ClassHierarchy; import org.dllearner.core.owl.Constant; import org.dllearner.core.owl.Datatype; import org.dllearner.core.owl.DatatypeProperty; @@ -59,6 +60,7 @@ import org.dllearner.core.owl.Thing; import org.dllearner.core.owl.TypedConstant; import org.dllearner.core.owl.UntypedConstant; +import org.dllearner.kb.OWLAPIOntology; import org.dllearner.kb.OWLOntologyKnowledgeSource; import org.dllearner.utilities.owl.ConceptComparator; import org.dllearner.utilities.owl.DLLearnerDescriptionConvertVisitor; @@ -938,7 +940,11 @@ OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); try { OWLOntology ontology = manager.loadOntologyFromOntologyDocument(IRI.create(iri)); - ontology = manager.loadOntologyFromOntologyDocument(new File("../test/phaenotype/mp-equivalence-axioms-subq.owl")); + KnowledgeSource ks = new OWLAPIOntology(ontology); + OWLAPIReasoner reasoner = new OWLAPIReasoner(ks); + reasoner.init(); + ClassHierarchy classHierarchy = reasoner.getClassHierarchy(); + System.out.println(classHierarchy.toString(false)); // new PelletReasonerFactory().createReasoner(ontology); System.out.println("Reasoner loaded succesfully."); CelReasoner r = new CelReasoner(ontology); Modified: trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java 2013-05-02 
10:57:42 UTC (rev 3935) +++ trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java 2013-05-03 07:31:37 UTC (rev 3936) @@ -19,6 +19,7 @@ package org.dllearner.utilities.datastructures; +import java.util.Collection; import java.util.List; import java.util.Random; import java.util.SortedSet; @@ -160,6 +161,14 @@ return ret; } + public static SortedSet<Individual> stringToInd(Collection<String> individualsAsString) { + SortedSet<Individual> ret = new TreeSet<Individual>(); + for (String ind : individualsAsString) { + ret.add(new Individual(ind)); + } + return ret; + } + public static SortedSet<String>indToString(SortedSet<Individual> set) { SortedSet<String> ret = new TreeSet<String>(); for (Individual ind : set) { Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java 2013-05-02 10:57:42 UTC (rev 3935) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/pattern/OWLPatternDetectionTest.java 2013-05-03 07:31:37 UTC (rev 3936) @@ -17,7 +17,7 @@ ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); } - @Test +// @Test public void testTONESRepository(){ OntologyRepository repository = new TONESRepository(); repository.initialize(); @@ -25,7 +25,7 @@ patternFinder.start(); } - @Test +// @Test public void testBioPortalRepository(){ OntologyRepository repository = new BioPortalRepository(); repository.initialize(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
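The OWLAPIReasoner change in this revision replaces the hard-coded test file in the main method with an in-memory OWLAPIOntology knowledge source and prints the inferred class hierarchy. A minimal sketch of that bootstrap pattern follows; the ontology location is a placeholder.

import org.dllearner.core.KnowledgeSource;
import org.dllearner.core.owl.ClassHierarchy;
import org.dllearner.kb.OWLAPIOntology;
import org.dllearner.reasoning.OWLAPIReasoner;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLOntology;

public class ClassHierarchySketch {
    public static void main(String[] args) throws Exception {
        // placeholder ontology location
        OWLOntology ontology = OWLManager.createOWLOntologyManager()
                .loadOntologyFromOntologyDocument(IRI.create("http://example.org/some-ontology.owl"));

        // wrap the loaded ontology as a DL-Learner knowledge source and reason over it
        KnowledgeSource ks = new OWLAPIOntology(ontology);
        OWLAPIReasoner reasoner = new OWLAPIReasoner(ks);
        reasoner.init();

        // print the inferred subsumption hierarchy, as the modified main method does
        ClassHierarchy classHierarchy = reasoner.getClassHierarchy();
        System.out.println(classHierarchy.toString(false));
    }
}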
From: <jen...@us...> - 2013-06-14 15:02:27
|
Revision: 4000 http://sourceforge.net/p/dl-learner/code/4000 Author: jenslehmann Date: 2013-06-14 15:02:24 +0000 (Fri, 14 Jun 2013) Log Message: ----------- added another semantic for all quantors to fast instance checker Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-14 12:41:49 UTC (rev 3999) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-14 15:02:24 UTC (rev 4000) @@ -139,7 +139,11 @@ "use those which have at least one r-filler and do not have an r-filler not in C.",defaultValue = "standard",propertyEditorClass = StringTrimmerEditor.class) private ForallSemantics forallSemantics = ForallSemantics.Standard; - public enum ForallSemantics { Standard, SomeOnly } + public enum ForallSemantics { + Standard, // standard all quantor + NonEmpty, // p only C for instance a returns false if there is no fact p(a,x) for any x + SomeOnly // p only C for instance a returns false if there is no fact p(a,x) with x \ in C + } /** * Creates an instance of the fast instance checker. @@ -401,6 +405,7 @@ return true; } SortedSet<Individual> roleFillers = opPos.get(op).get(individual); + if (roleFillers == null) { if(forallSemantics == ForallSemantics.Standard) { return true; @@ -408,12 +413,20 @@ return false; } } + boolean hasCorrectFiller = false; for (Individual roleFiller : roleFillers) { - if (!hasTypeImpl(child, roleFiller)) { + if (hasTypeImpl(child, roleFiller)) { + hasCorrectFiller = true; + } else { return false; - } + } } - return true; + + if(forallSemantics == ForallSemantics.SomeOnly) { + return hasCorrectFiller; + } else { + return true; + } } else if (description instanceof ObjectMinCardinalityRestriction) { ObjectPropertyExpression ope = ((ObjectCardinalityRestriction) description).getRole(); if (!(ope instanceof ObjectProperty)) { Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java 2013-06-14 12:41:49 UTC (rev 3999) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java 2013-06-14 15:02:24 UTC (rev 4000) @@ -31,6 +31,7 @@ * */ public class SomeOnlyReasonerTest { + @Test public void someOnlyTest() throws ComponentInitException, LearningProblemUnsupportedException { // TODO: use aksw-commons-sparql instead of sparql-scala This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
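The three ForallSemantics options only differ in how they treat the set of role fillers of an individual, so the case analysis in the modified hasTypeImpl can be captured in a few lines. The sketch below mirrors that logic with plain collections (a set of filler names and the extension of C) instead of the reasoner's internal data structures; it is an illustration of the decision rules, not reasoner code.

import java.util.Set;

public class ForallSemanticsSketch {

    public enum ForallSemantics { Standard, NonEmpty, SomeOnly }

    /**
     * Decides "p only C" for an individual whose known p-fillers are given,
     * following the same case distinctions as FastInstanceChecker.hasTypeImpl:
     * Standard  - vacuously true when no filler is known,
     * NonEmpty  - false when no filler is known at all,
     * SomeOnly  - additionally requires at least one filler inside C.
     */
    public static boolean holds(Set<String> fillers, Set<String> extensionOfC, ForallSemantics semantics) {
        if (fillers == null) {
            // no p-filler known for this individual
            return semantics == ForallSemantics.Standard;
        }
        boolean hasCorrectFiller = false;
        for (String filler : fillers) {
            if (extensionOfC.contains(filler)) {
                hasCorrectFiller = true;
            } else {
                return false; // a filler outside C refutes the restriction under every semantics
            }
        }
        return semantics == ForallSemantics.SomeOnly ? hasCorrectFiller : true;
    }
}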
From: <lor...@us...> - 2013-07-08 13:51:34
|
Revision: 4015 http://sourceforge.net/p/dl-learner/code/4015 Author: lorenz_b Date: 2013-07-08 13:51:31 +0000 (Mon, 08 Jul 2013) Log Message: ----------- First refactoring of ISLE algorithm. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OEHeuristicRuntime.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneSearcher.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIConverter.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneBasedRelevance.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMILuceneBasedRelevance.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/Relevance.java trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java Removed Paths: ------------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/Relevances.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OEHeuristicRuntime.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OEHeuristicRuntime.java 2013-07-08 13:49:40 UTC (rev 4014) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OEHeuristicRuntime.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -25,7 +25,6 @@ import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.DoubleEditor; import org.dllearner.utilities.owl.ConceptComparator; /** Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2013-07-08 13:49:40 UTC (rev 4014) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -19,6 +19,7 @@ package org.dllearner.algorithms.isle; +import java.io.File; import java.text.DecimalFormat; import java.util.Collection; import java.util.Iterator; @@ -35,11 +36,10 @@ import org.dllearner.core.AbstractCELA; import org.dllearner.core.AbstractLearningProblem; import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.EvaluatedDescription; -import org.dllearner.core.options.BooleanConfigOption; -import org.dllearner.core.options.CommonConfigOptions; -import org.dllearner.core.options.ConfigOption; +import org.dllearner.core.config.ConfigOption; import org.dllearner.core.owl.ClassHierarchy; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Individual; @@ -51,16 +51,20 @@ import 
org.dllearner.learningproblems.PosNegLP; import org.dllearner.learningproblems.PosNegLPStandard; import org.dllearner.learningproblems.PosOnlyLP; +import org.dllearner.refinementoperators.CustomHierarchyRefinementOperator; +import org.dllearner.refinementoperators.CustomStartRefinementOperator; import org.dllearner.refinementoperators.LengthLimitedRefinementOperator; import org.dllearner.refinementoperators.OperatorInverter; -import org.dllearner.refinementoperators.RefinementOperator; +import org.dllearner.refinementoperators.ReasoningBasedRefinementOperator; import org.dllearner.refinementoperators.RhoDRDown; +import org.dllearner.utilities.Files; import org.dllearner.utilities.Helper; import org.dllearner.utilities.owl.ConceptComparator; import org.dllearner.utilities.owl.ConceptTransformation; import org.dllearner.utilities.owl.DescriptionMinimizer; import org.dllearner.utilities.owl.EvaluatedDescriptionSet; import org.dllearner.utilities.owl.PropertyContext; +import org.springframework.beans.factory.annotation.Autowired; import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; @@ -71,9 +75,11 @@ * @author Jens Lehmann * */ +@ComponentAnn(name="ISLE", shortName="isle", version=0.5, description="CELOE is an adapted and extended version of the OCEL algorithm applied for the ontology engineering use case. See http://jens-lehmann.org/files/2011/celoe.pdf for reference.") public class ISLE extends AbstractCELA { private static Logger logger = Logger.getLogger(CELOE.class); +// private CELOEConfigurator configurator; private boolean isRunning = false; private boolean stop = false; @@ -83,13 +89,17 @@ private LengthLimitedRefinementOperator operator; private DescriptionMinimizer minimizer; + @ConfigOption(name="useMinimizer", defaultValue="true", description="Specifies whether returned expressions should be minimised by removing those parts, which are not needed. (Basically the minimiser tries to find the shortest expression which is equivalent to the learned expression). Turning this feature off may improve performance.") + private boolean useMinimizer = true; // all nodes in the search tree (used for selecting most promising node) private TreeSet<OENode> nodes; +// private OEHeuristicRuntime heuristic; // = new OEHeuristicRuntime(); private NLPHeuristic heuristic = new NLPHeuristic(); // root of search tree private OENode startNode; // the class with which we start the refinement process + @ConfigOption(name = "startClass", defaultValue="owl:Thing", description="You can specify a start class for the algorithm. 
To do this, you have to use Manchester OWL syntax without using prefixes.") private Description startClass; // all descriptions in the search tree plus those which were too weak (for fast redundancy check) @@ -99,6 +109,7 @@ // if true, then each solution is evaluated exactly instead of approximately // private boolean exactBestDescriptionEvaluation = false; + @ConfigOption(name = "singleSuggestionMode", defaultValue="false", description="Use this if you are interested in only one suggestion and your learning problem has many (more than 1000) examples.") private boolean singleSuggestionMode; private Description bestDescription; private double bestAccuracy = Double.MIN_VALUE; @@ -115,11 +126,16 @@ private long nanoStartTime; - // important parameters + // important parameters (non-config options but internal) private double noise; - private double maxDepth; - private boolean filterFollowsFromKB; + + private boolean filterFollowsFromKB; + // less important parameters + // forces that one solution cannot be subexpression of another expression; this option is useful to get diversity + // but it can also suppress quite useful expressions + private boolean forceMutualDifference = false; + // utility variables private String baseURI; private Map<String, String> prefixes; @@ -130,80 +146,165 @@ private int expressionTests = 0; private int minHorizExp = 0; private int maxHorizExp = 0; + + // TODO: turn those into config options + + // important: do not initialise those with empty sets + // null = no settings for allowance / ignorance + // empty set = allow / ignore nothing (it is often not desired to allow no class!) + Set<NamedClass> allowedConcepts = null; + Set<NamedClass> ignoredConcepts = null; - private double noisePercentage = 0.0; + @ConfigOption(name = "writeSearchTree", defaultValue="false", description="specifies whether to write a search tree") + private boolean writeSearchTree = false; + @ConfigOption(name = "searchTreeFile", defaultValue="log/searchTree.txt", description="file to use for the search tree") + private String searchTreeFile = "log/searchTree.txt"; + + @ConfigOption(name = "replaceSearchTree", defaultValue="false", description="specifies whether to replace the search tree in the log file after each run or append the new search tree") + private boolean replaceSearchTree = false; + + @ConfigOption(name = "maxNrOfResults", defaultValue="10", description="Sets the maximum number of results one is interested in. (Setting this to a lower value may increase performance as the learning algorithm has to store/evaluate/beautify less descriptions).") private int maxNrOfResults = 10; - private boolean filterDescriptionsFollowingFromKB = true; + @ConfigOption(name = "noisePercentage", defaultValue="0.0", description="the (approximated) percentage of noise within the examples") + private double noisePercentage = 0.0; - private long maxExecutionTimeInSeconds = 10; + @ConfigOption(name = "filterDescriptionsFollowingFromKB", defaultValue="false", description="If true, then the results will not contain suggestions, which already follow logically from the knowledge base. 
Be careful, since this requires a potentially expensive consistency check for candidate solutions.") + private boolean filterDescriptionsFollowingFromKB = false; + @ConfigOption(name = "reuseExistingDescription", defaultValue="false", description="If true, the algorithm tries to find a good starting point close to an existing definition/super class of the given class in the knowledge base.") private boolean reuseExistingDescription = false; + + @ConfigOption(name = "maxClassExpressionTests", defaultValue="0", description="The maximum number of candidate hypothesis the algorithm is allowed to test (0 = no limit). The algorithm will stop afterwards. (The real number of tests can be slightly higher, because this criterion usually won't be checked after each single test.)") + private int maxClassExpressionTests = 0; + + @ConfigOption(name = "maxClassExpressionTestsAfterImprovement", defaultValue="0", description = "The maximum number of candidate hypothesis the algorithm is allowed after an improvement in accuracy (0 = no limit). The algorithm will stop afterwards. (The real number of tests can be slightly higher, because this criterion usually won't be checked after each single test.)") + private int maxClassExpressionTestsAfterImprovement = 0; + @ConfigOption(defaultValue = "10", name = "maxExecutionTimeInSeconds", description = "maximum execution of the algorithm in seconds") + private int maxExecutionTimeInSeconds = 10; + + @ConfigOption(defaultValue = "0", name = "maxExecutionTimeInSecondsAfterImprovement", description = "maximum execution of the algorithm in seconds") + private int maxExecutionTimeInSecondsAfterImprovement = 0; + + @ConfigOption(name = "terminateOnNoiseReached", defaultValue="false", description="specifies whether to terminate when noise criterion is met") + private boolean terminateOnNoiseReached = false; + + @ConfigOption(name = "maxDepth", defaultValue="7", description="maximum depth of description") + private double maxDepth = 7; + + @ConfigOption(name = "stopOnFirstDefinition", defaultValue="false", description="algorithm will terminate immediately when a correct definition is found") + private boolean stopOnFirstDefinition = false; + + private int expressionTestCountLastImprovement; + + + @SuppressWarnings("unused") + private long timeLastImprovement = 0; + +// public CELOEConfigurator getConfigurator() { +// return configurator; +// } + + public ISLE() { + + } + public ISLE(AbstractLearningProblem problem, AbstractReasonerComponent reasoner) { super(problem, reasoner); +// configurator = new CELOEConfigurator(this); } public static Collection<Class<? extends AbstractLearningProblem>> supportedLearningProblems() { Collection<Class<? extends AbstractLearningProblem>> problems = new LinkedList<Class<? 
extends AbstractLearningProblem>>(); problems.add(AbstractLearningProblem.class); return problems; - } - - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(CommonConfigOptions.useAllConstructor()); - options.add(CommonConfigOptions.useExistsConstructor()); - options.add(CommonConfigOptions.useHasValueConstructor()); - options.add(CommonConfigOptions.useDataHasValueConstructor()); - options.add(CommonConfigOptions.valueFreqencyThreshold()); - options.add(CommonConfigOptions.useCardinalityRestrictions()); - options.add(CommonConfigOptions.cardinalityLimit()); - // by default, we do not use negation (should be configurable in GUI) - options.add(CommonConfigOptions.useNegation(false)); - options.add(CommonConfigOptions.useBooleanDatatypes()); - options.add(CommonConfigOptions.useDoubleDatatypes()); - options.add(CommonConfigOptions.maxExecutionTimeInSeconds(10)); - options.add(CommonConfigOptions.getNoisePercentage()); - options.add(CommonConfigOptions.getMaxDepth(7)); - options.add(CommonConfigOptions.maxNrOfResults(10)); - options.add(new BooleanConfigOption("singleSuggestionMode", "Use this if you are interested in only one suggestion and your learning problem has many (more than 1000) examples.", false)); - options.add(CommonConfigOptions.getInstanceBasedDisjoints()); - options.add(new BooleanConfigOption("filterDescriptionsFollowingFromKB", "If true, then the results will not contain suggestions, which already follow logically from the knowledge base. Be careful, since this requires a potentially expensive consistency check for candidate solutions.", false)); - options.add(new BooleanConfigOption("reuseExistingDescription", "If true, the algorithm tries to find a good starting point close to an existing definition/super class of the given class in the knowledge base.", false)); - return options; } public static String getName() { - return "ISLE"; + return "CELOE"; } @Override public void init() throws ComponentInitException { + + if(maxExecutionTimeInSeconds != 0 && maxExecutionTimeInSecondsAfterImprovement != 0) { + maxExecutionTimeInSeconds = Math.min(maxExecutionTimeInSeconds, maxExecutionTimeInSecondsAfterImprovement); + } + + // compute used concepts/roles from allowed/ignored + // concepts/roles + Set<NamedClass> usedConcepts; +// Set<NamedClass> allowedConcepts = configurator.getAllowedConcepts()==null ? null : CommonConfigMappings.getAtomicConceptSet(configurator.getAllowedConcepts()); +// Set<NamedClass> ignoredConcepts = configurator.getIgnoredConcepts()==null ? 
null : CommonConfigMappings.getAtomicConceptSet(configurator.getIgnoredConcepts()); + if(allowedConcepts != null) { + // sanity check to control if no non-existing concepts are in the list + Helper.checkConcepts(reasoner, allowedConcepts); + usedConcepts = allowedConcepts; + } else if(ignoredConcepts != null) { + usedConcepts = Helper.computeConceptsUsingIgnoreList(reasoner, ignoredConcepts); + } else { + usedConcepts = Helper.computeConcepts(reasoner); + } + // copy class hierarchy and modify it such that each class is only // reachable via a single path - ClassHierarchy classHierarchy = reasoner.getClassHierarchy().clone(); +// ClassHierarchy classHierarchy = reasoner.getClassHierarchy().clone(); + ClassHierarchy classHierarchy = reasoner.getClassHierarchy().cloneAndRestrict(usedConcepts); classHierarchy.thinOutSubsumptionHierarchy(); + + // if no one injected a heuristic, we use a default one + if(heuristic == null) { + heuristic = new NLPHeuristic(); + } minimizer = new DescriptionMinimizer(reasoner); - startClass = Thing.instance; + // start at owl:Thing by default + if(startClass == null) { + startClass = Thing.instance; + } // singleSuggestionMode = configurator.getSingleSuggestionMode(); - + /* // create refinement operator -// operator = new RhoDRDown(reasoner, classHierarchy, startClass, configurator); - // create refinement operator if(operator == null) { operator = new RhoDRDown(); ((RhoDRDown)operator).setStartClass(startClass); - ((RhoDRDown)operator).setSubHierarchy(classHierarchy); ((RhoDRDown)operator).setReasoner(reasoner); - ((RhoDRDown)operator).init(); - } + } + ((RhoDRDown)operator).setSubHierarchy(classHierarchy); + ((RhoDRDown)operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy()); + ((RhoDRDown)operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy()); + ((RhoDRDown)operator).init(); + */ + // create a refinement operator and pass all configuration + // variables to it + if(operator == null) { + // we use a default operator and inject the class hierarchy for now + operator = new RhoDRDown(); + if(operator instanceof CustomStartRefinementOperator) { + ((CustomStartRefinementOperator)operator).setStartClass(startClass); + } + if(operator instanceof ReasoningBasedRefinementOperator) { + ((ReasoningBasedRefinementOperator)operator).setReasoner(reasoner); + } + operator.init(); + } + if(operator instanceof CustomHierarchyRefinementOperator) { + ((CustomHierarchyRefinementOperator)operator).setClassHierarchy(classHierarchy); + ((CustomHierarchyRefinementOperator)operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy()); + ((CustomHierarchyRefinementOperator)operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy()); + } + +// operator = new RhoDRDown(reasoner, classHierarchy, startClass, configurator); baseURI = reasoner.getBaseURI(); prefixes = reasoner.getPrefixes(); + if(writeSearchTree) { + File f = new File(searchTreeFile ); + Files.clearFile(f); + } bestEvaluatedDescriptions = new EvaluatedDescriptionSet(maxNrOfResults); @@ -211,12 +312,18 @@ // we put important parameters in class variables noise = noisePercentage/100d; +// System.out.println("noise " + noise); // maxDepth = configurator.getMaxDepth(); // (filterFollowsFromKB is automatically set to false if the problem // is not a class learning problem - filterFollowsFromKB = filterDescriptionsFollowingFromKB - && isClassLearningProblem; + filterFollowsFromKB = filterDescriptionsFollowingFromKB && isClassLearningProblem; +// 
Set<Description> concepts = operator.refine(Thing.instance, 5); +// for(Description concept : concepts) { +// System.out.println(concept); +// } +// System.out.println("refinements of thing: " + concepts.size()); + // actions specific to ontology engineering if(isClassLearningProblem) { ClassLearningProblem problem = (ClassLearningProblem) learningProblem; @@ -230,7 +337,7 @@ // superfluous to add super classes in this case) if(isEquivalenceProblem) { Set<Description> existingDefinitions = reasoner.getAssertedDefinitions(classToDescribe); - if(reuseExistingDescription && (existingDefinitions.size() > 0)) { + if(reuseExistingDescription && (existingDefinitions.size() > 0)) { // the existing definition is reused, which in the simplest case means to // use it as a start class or, if it is already too specific, generalise it @@ -246,7 +353,10 @@ LinkedList<Description> startClassCandidates = new LinkedList<Description>(); startClassCandidates.add(existingDefinition); - ((RhoDRDown)operator).setDropDisjuncts(true); + // hack for RhoDRDown + if(operator instanceof RhoDRDown) { + ((RhoDRDown)operator).setDropDisjuncts(true); + } LengthLimitedRefinementOperator upwardOperator = (LengthLimitedRefinementOperator) new OperatorInverter(operator); // use upward refinement until we find an appropriate start class @@ -279,7 +389,9 @@ // System.out.println("existing def: " + existingDefinition); // System.out.println(reasoner.getIndividuals(existingDefinition)); - ((RhoDRDown)operator).setDropDisjuncts(false); + if(operator instanceof RhoDRDown) { + ((RhoDRDown)operator).setDropDisjuncts(false); + } } else { Set<Description> superClasses = reasoner.getClassHierarchy().getSuperClasses(classToDescribe); @@ -322,6 +434,10 @@ return bestEvaluatedDescriptions.getSet(); } + public double getCurrentlyBestAccuracy() { + return bestEvaluatedDescriptions.getBest().getAccuracy(); + } + @Override public void start() { // System.out.println(configurator.getMaxExecutionTimeInSeconds()); @@ -339,10 +455,13 @@ int loop = 0; while (!terminationCriteriaSatisfied()) { +// System.out.println("loop " + loop); if(!singleSuggestionMode && bestEvaluatedDescriptions.getBestAccuracy() > highestAccuracy) { highestAccuracy = bestEvaluatedDescriptions.getBestAccuracy(); - logger.info("more accurate (" + dfPercent.format(highestAccuracy) + ") class expression found: " + descriptionToString(bestEvaluatedDescriptions.getBest().getDescription())); + expressionTestCountLastImprovement = expressionTests; + timeLastImprovement = System.nanoTime(); + logger.info("more accurate (" + dfPercent.format(highestAccuracy) + ") class expression found: " + descriptionToString(bestEvaluatedDescriptions.getBest().getDescription())); } // chose best node according to heuristics @@ -358,12 +477,16 @@ // for(Description refinement : refinements) { // System.out.println("refinement: " + refinement); // } +// if((loop+1) % 500 == 0) { +// System.out.println(getMinimumHorizontalExpansion() + " - " + getMaximumHorizontalExpansion()); +// System.exit(0); +// } while(refinements.size() != 0) { // pick element from set Description refinement = refinements.pollFirst(); int length = refinement.getLength(); - + // we ignore all refinements with lower length and too high depth // (this also avoids duplicate node children) if(length > horizExp && refinement.getDepth() <= maxDepth) { @@ -385,6 +508,24 @@ updateMinMaxHorizExp(nextNode); + // writing the search tree (if configured) + if (writeSearchTree) { + String treeString = "best node: " + 
bestEvaluatedDescriptions.getBest() + "\n"; + if (refinements.size() > 1) { + treeString += "all expanded nodes:\n"; + for (Description n : refinements) { + treeString += " " + n + "\n"; + } + } + treeString += startNode.toTreeString(baseURI); + treeString += "\n"; + + if (replaceSearchTree) + Files.createFile(new File(searchTreeFile), treeString); + else + Files.appendToFile(new File(searchTreeFile), treeString); + } + // System.out.println(loop); loop++; } @@ -392,7 +533,7 @@ if (stop) { logger.info("Algorithm stopped ("+expressionTests+" descriptions tested). " + nodes.size() + " nodes in the search tree.\n"); } else { - logger.info("Algorithm terminated successfully ("+expressionTests+" descriptions tested). " + nodes.size() + " nodes in the search tree.\n"); + logger.info("Algorithm terminated successfully (time: " + Helper.prettyPrintNanoSeconds(System.nanoTime()-nanoStartTime) + ", "+expressionTests+" descriptions tested, " + nodes.size() + " nodes in the search tree).\n"); logger.info(reasoner.toString()); } @@ -445,7 +586,7 @@ // returns true if node was added and false otherwise private boolean addNode(Description description, OENode parentNode) { -// System.out.println(description); +// System.out.println("d: " + description); // redundancy check (return if redundant) boolean nonRedundant = descriptions.add(description); @@ -498,6 +639,8 @@ return true; } +// System.out.println("description " + description + " accuracy " + accuracy); + // maybe add to best descriptions (method keeps set size fixed); // we need to make sure that this does not get called more often than // necessary since rewriting is expensive @@ -510,30 +653,42 @@ (accuracy >= accThreshold && description.getLength() < worst.getDescriptionLength())); } +// System.out.println(isCandidate); + // System.out.println("Test4 " + new Date()); if(isCandidate) { + Description niceDescription = rewriteNode(node); ConceptTransformation.transformToOrderedForm(niceDescription, descriptionComparator); // Description niceDescription = node.getDescription(); // another test: none of the other suggested descriptions should be // a subdescription of this one unless accuracy is different + // => comment: on the one hand, this appears to be too strict, because once A is a solution then everything containing + // A is not a candidate; on the other hand this suppresses many meaningless extensions of A boolean shorterDescriptionExists = false; - for(EvaluatedDescription ed : bestEvaluatedDescriptions.getSet()) { - if(Math.abs(ed.getAccuracy()-accuracy) <= 0.00001 && ConceptTransformation.isSubdescription(niceDescription, ed.getDescription())) { - shorterDescriptionExists = true; - break; - } + if(forceMutualDifference) { + for(EvaluatedDescription ed : bestEvaluatedDescriptions.getSet()) { + if(Math.abs(ed.getAccuracy()-accuracy) <= 0.00001 && ConceptTransformation.isSubdescription(niceDescription, ed.getDescription())) { +// System.out.println("shorter: " + ed.getDescription()); + shorterDescriptionExists = true; + break; + } + } } +// System.out.println("shorter description? 
" + shorterDescriptionExists + " nice: " + niceDescription); + if(!shorterDescriptionExists) { if(!filterFollowsFromKB || !((ClassLearningProblem)learningProblem).followsFromKB(niceDescription)) { +// System.out.println("Test2"); bestEvaluatedDescriptions.add(niceDescription, accuracy, learningProblem); // System.out.println("acc: " + accuracy); // System.out.println(bestEvaluatedDescriptions); } } +// System.out.println(bestEvaluatedDescriptions.getSet().size()); } // System.out.println("Test5 " + new Date()); @@ -630,14 +785,26 @@ private Description rewriteNode(OENode node) { Description description = node.getDescription(); // minimize description (expensive!) - also performes some human friendly rewrites - Description niceDescription = minimizer.minimizeClone(description); + Description niceDescription; + if(useMinimizer) { + niceDescription = minimizer.minimizeClone(description); + } else { + niceDescription = description; + } // replace \exists r.\top with \exists r.range(r) which is easier to read for humans ConceptTransformation.replaceRange(niceDescription, reasoner); return niceDescription; } private boolean terminationCriteriaSatisfied() { - return stop || ((System.nanoTime() - nanoStartTime) >= (maxExecutionTimeInSeconds*1000000000l)); + return + stop || + (maxClassExpressionTestsAfterImprovement != 0 && (expressionTests - expressionTestCountLastImprovement >= maxClassExpressionTestsAfterImprovement)) || + (maxClassExpressionTests != 0 && (expressionTests >= maxClassExpressionTests)) || + (maxExecutionTimeInSecondsAfterImprovement != 0 && ((System.nanoTime() - nanoStartTime) >= (maxExecutionTimeInSecondsAfterImprovement*1000000000l))) || + (maxExecutionTimeInSeconds != 0 && ((System.nanoTime() - nanoStartTime) >= (maxExecutionTimeInSeconds*1000000000l))) || + (terminateOnNoiseReached && (100*getCurrentlyBestAccuracy()>=100-noisePercentage)) || + (stopOnFirstDefinition && (getCurrentlyBestAccuracy() >= 1)); } private void reset() { @@ -740,6 +907,196 @@ */ public int getClassExpressionTests() { return expressionTests; + } + + public LengthLimitedRefinementOperator getOperator() { + return operator; + } + + @Autowired(required=false) + public void setOperator(LengthLimitedRefinementOperator operator) { + this.operator = operator; + } + + public Description getStartClass() { + return startClass; + } + + public void setStartClass(Description startClass) { + this.startClass = startClass; + } + + public Set<NamedClass> getAllowedConcepts() { + return allowedConcepts; + } + + public void setAllowedConcepts(Set<NamedClass> allowedConcepts) { + this.allowedConcepts = allowedConcepts; + } + + public Set<NamedClass> getIgnoredConcepts() { + return ignoredConcepts; + } + + public void setIgnoredConcepts(Set<NamedClass> ignoredConcepts) { + this.ignoredConcepts = ignoredConcepts; + } + + public boolean isWriteSearchTree() { + return writeSearchTree; + } + + public void setWriteSearchTree(boolean writeSearchTree) { + this.writeSearchTree = writeSearchTree; + } + + public String getSearchTreeFile() { + return searchTreeFile; + } + + public void setSearchTreeFile(String searchTreeFile) { + this.searchTreeFile = searchTreeFile; + } + + public int getMaxNrOfResults() { + return maxNrOfResults; + } + + public void setMaxNrOfResults(int maxNrOfResults) { + this.maxNrOfResults = maxNrOfResults; + } + + public double getNoisePercentage() { + return noisePercentage; + } + + public void setNoisePercentage(double noisePercentage) { + this.noisePercentage = noisePercentage; + } + + public boolean 
isFilterDescriptionsFollowingFromKB() { + return filterDescriptionsFollowingFromKB; + } + + public void setFilterDescriptionsFollowingFromKB(boolean filterDescriptionsFollowingFromKB) { + this.filterDescriptionsFollowingFromKB = filterDescriptionsFollowingFromKB; + } + + public boolean isReplaceSearchTree() { + return replaceSearchTree; + } + + public void setReplaceSearchTree(boolean replaceSearchTree) { + this.replaceSearchTree = replaceSearchTree; + } + + public int getMaxClassDescriptionTests() { + return maxClassExpressionTests; + } + + public void setMaxClassDescriptionTests(int maxClassDescriptionTests) { + this.maxClassExpressionTests = maxClassDescriptionTests; + } + + public int getMaxExecutionTimeInSeconds() { + return maxExecutionTimeInSeconds; + } + + public void setMaxExecutionTimeInSeconds(int maxExecutionTimeInSeconds) { + this.maxExecutionTimeInSeconds = maxExecutionTimeInSeconds; + } + + public boolean isTerminateOnNoiseReached() { + return terminateOnNoiseReached; + } + + public void setTerminateOnNoiseReached(boolean terminateOnNoiseReached) { + this.terminateOnNoiseReached = terminateOnNoiseReached; + } + + public boolean isReuseExistingDescription() { + return reuseExistingDescription; + } + + public void setReuseExistingDescription(boolean reuseExistingDescription) { + this.reuseExistingDescription = reuseExistingDescription; + } + + public boolean isUseMinimizer() { + return useMinimizer; + } + + public void setUseMinimizer(boolean useMinimizer) { + this.useMinimizer = useMinimizer; + } + + public NLPHeuristic getHeuristic() { + return heuristic; + } + + @Autowired(required=false) + public void setHeuristic(NLPHeuristic heuristic) { + this.heuristic = heuristic; + } + + public int getMaxClassExpressionTestsWithoutImprovement() { + return maxClassExpressionTestsAfterImprovement; + } + + public void setMaxClassExpressionTestsWithoutImprovement( + int maxClassExpressionTestsWithoutImprovement) { + this.maxClassExpressionTestsAfterImprovement = maxClassExpressionTestsWithoutImprovement; + } + + public int getMaxExecutionTimeInSecondsAfterImprovement() { + return maxExecutionTimeInSecondsAfterImprovement; + } + + public void setMaxExecutionTimeInSecondsAfterImprovement( + int maxExecutionTimeInSecondsAfterImprovement) { + this.maxExecutionTimeInSecondsAfterImprovement = maxExecutionTimeInSecondsAfterImprovement; } + public boolean isSingleSuggestionMode() { + return singleSuggestionMode; + } + + public void setSingleSuggestionMode(boolean singleSuggestionMode) { + this.singleSuggestionMode = singleSuggestionMode; + } + + public int getMaxClassExpressionTests() { + return maxClassExpressionTests; + } + + public void setMaxClassExpressionTests(int maxClassExpressionTests) { + this.maxClassExpressionTests = maxClassExpressionTests; + } + + public int getMaxClassExpressionTestsAfterImprovement() { + return maxClassExpressionTestsAfterImprovement; + } + + public void setMaxClassExpressionTestsAfterImprovement( + int maxClassExpressionTestsAfterImprovement) { + this.maxClassExpressionTestsAfterImprovement = maxClassExpressionTestsAfterImprovement; + } + + public double getMaxDepth() { + return maxDepth; + } + + public void setMaxDepth(double maxDepth) { + this.maxDepth = maxDepth; + } + + + public boolean isStopOnFirstDefinition() { + return stopOnFirstDefinition; + } + + public void setStopOnFirstDefinition(boolean stopOnFirstDefinition) { + this.stopOnFirstDefinition = stopOnFirstDefinition; + } + } Added: 
trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -0,0 +1,95 @@ +/** + * + */ +package org.dllearner.algorithms.isle; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.dllearner.core.owl.Entity; +import org.dllearner.kb.OWLAPIOntology; +import org.dllearner.utilities.owl.OWLAPIConverter; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.util.IRIShortFormProvider; +import org.semanticweb.owlapi.util.SimpleIRIShortFormProvider; +import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; + +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; + + +/** + * @author Lorenz Buehmann + * + */ +public class LabelEntityTextRetriever implements EntityTextRetriever{ + + private OWLOntology ontology; + private OWLOntologyManager manager; + private OWLDataFactory df = new OWLDataFactoryImpl(); + + private OWLAnnotationProperty label = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()); + + private String language = "en"; + private double weight = 1d; + + private boolean useShortFormFallback = true; + private IRIShortFormProvider sfp = new SimpleIRIShortFormProvider(); + + public LabelEntityTextRetriever(OWLOntology ontology) { + this.ontology = ontology; + } + + public LabelEntityTextRetriever(OWLAPIOntology ontology) { + this.ontology = ontology.createOWLOntology(manager); + } + + /** + * @param language the language to set + */ + public void setLanguage(String language) { + this.language = language; + } + + /** + * Whether to use the short form of the IRI as fallback, if no label is given. 
+ * @param useShortFormFallback the useShortFormFallback to set + */ + public void setUseShortFormFallback(boolean useShortFormFallback) { + this.useShortFormFallback = useShortFormFallback; + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.isle.EntityTextRetriever#getRelevantText(org.dllearner.core.owl.Entity) + */ + @Override + public Map<String, Double> getRelevantText(Entity entity) { + Map<String, Double> textWithWeight = new HashMap<String, Double>(); + + OWLEntity e = OWLAPIConverter.getOWLAPIEntity(entity); + + Set<OWLAnnotation> annotations = e.getAnnotations(ontology, label); + for (OWLAnnotation annotation : annotations) { + if (annotation.getValue() instanceof OWLLiteral) { + OWLLiteral val = (OWLLiteral) annotation.getValue(); + if (val.hasLang(language)) { + String label = val.getLiteral(); + textWithWeight.put(label, weight); + } + } + } + + if(textWithWeight.isEmpty() && useShortFormFallback){ + textWithWeight.put(sfp.getShortForm(IRI.create(entity.getURI())), weight); + } + + return textWithWeight; + } +} Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneBasedRelevance.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneBasedRelevance.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneBasedRelevance.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -0,0 +1,145 @@ +/** + * Copyright (C) 2007-2011, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + + +package org.dllearner.algorithms.isle; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.dllearner.core.owl.Entity; +import org.dllearner.utilities.owl.OWLAPIConverter; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLOntology; + + +public abstract class LuceneBasedRelevance implements Relevance{ + + private EntityTextRetriever textRetriever; + private LuceneSearcher searcher; + private OWLOntology ontology; + private Set<OWLEntity> entities; + +// public void printScores() throws Exception { +// for( OWLClass c: m_classes ) +// { +// Map<OWLEntity,Double> hmEntity2Score = getEntityRelevance(c); +// // normalization per class? 
+// hmEntity2Score = normalize( hmEntity2Score ); +// for( OWLEntity e : hmEntity2Score.keySet() ) +// { +// double dScore = hmEntity2Score.get(e); +// System.out.println( "P( "+ getLabel(c) +", "+ getLabel(e) +" ) = "+ dScore ); +// } +// } +// m_searcher.close(); +// } + + public LuceneBasedRelevance(OWLOntology ontology, LuceneSearcher searcher, EntityTextRetriever textRetriever) { + this.searcher = searcher; + this.ontology = ontology; + this.textRetriever = textRetriever; + + entities = new HashSet<OWLEntity>(); + entities.addAll(ontology.getClassesInSignature()); + entities.addAll(ontology.getObjectPropertiesInSignature()); + entities.addAll(ontology.getDataPropertiesInSignature()); + } + + public Map<OWLEntity,Double> normalizeMinMax( Map<OWLEntity,Double> hmEntity2Score ){ + Map<OWLEntity,Double> hmEntity2Norm = new HashMap<OWLEntity,Double>(); + double dMin = Double.MAX_VALUE; + Double dMax = Double.MIN_VALUE; + for( OWLEntity e : hmEntity2Score.keySet() ) + { + double dValue = hmEntity2Score.get(e); + if( dValue < dMin ){ + dMin = dValue; + } + else if( dValue > dMax ){ + dMax = dValue; + } + } + // System.out.println( "min="+ dMin +" max="+ dMax ); + for( OWLEntity e : hmEntity2Score.keySet() ) + { + double dValue = hmEntity2Score.get(e); + double dNorm = 0; + if( dMin == dMax ){ + dNorm = dValue; + } + else { + dNorm = ( dValue - dMin ) / ( dMax - dMin ); + } + hmEntity2Norm.put( e, dNorm ); + } + return hmEntity2Norm; + } + + @Override + public Map<Entity,Double> getEntityRelevance(Entity entity) throws Exception { + // computes relevance of entity for this class + // conditional probability: P(C,E)=f(C,E)/f(E) + // PMI(C,E)=log( P(C,E) / P(C) ) + Map<Entity, Double> hmEntity2Score = new HashMap<Entity, Double>(); + Map<String, Double> relevantText = textRetriever.getRelevantText(entity); + + for (Entry<String, Double> entry : relevantText.entrySet()) { + String text = entry.getKey(); + Double value = entry.getValue(); + + String sClass = text; + int nrOfDocumentsA = searcher.count(sClass); + int nrOfDocuments = searcher.indexSize(); + + for (OWLEntity otherEntity : entities) { + + Map<String, Double> otherRelevantText = textRetriever.getRelevantText(OWLAPIConverter + .getEntity(otherEntity)); + + for (Entry<String, Double> entry2 : otherRelevantText.entrySet()) { + String otherText = entry2.getKey(); + Double otherValue = entry2.getValue(); + + String sEntity = otherText; + int nrOfDocumentsB = searcher.count(sEntity); + int nrOfDocumentsAB = searcher.count(sClass + " AND " + sEntity); + // double dPEntity = (double)iEntity / (double)iAll; + + double score = computeScore(nrOfDocuments, nrOfDocumentsA, nrOfDocumentsB, nrOfDocumentsAB); + if (!Double.isNaN(score)){// && !Double.isInfinite(score)) { + hmEntity2Score.put(OWLAPIConverter.getEntity(otherEntity), score); + } + } + } + } + + return hmEntity2Score; + } + + /** + * Computes the score which is returned in {@link org.dllearner.algorithms.isle.LuceneBasedRelevance#getEntityRelevance} + * @return + */ + public abstract double computeScore(int nrOfDocuments, int nrOfDocumentsA, int nrOfDocumentsB, int nrOfDocumentsAB); + +} \ No newline at end of file Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneSearcher.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneSearcher.java 2013-07-08 13:49:40 UTC (rev 4014) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LuceneSearcher.java 
2013-07-08 13:51:31 UTC (rev 4015) @@ -40,13 +40,14 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.Version; public class LuceneSearcher { - private String INDEX = "index"; - private String FIELD = "contents"; + private String INDEX = "/home/me/DBpedia-Lucene-Index"; + private String FIELD = "short-abstract"; private IndexReader m_reader = null; private IndexSearcher m_searcher = null; @@ -61,12 +62,12 @@ LuceneSearcher searcher = new LuceneSearcher(); List<Document> docs = searcher.search( sQuery ); System.out.println( "\nquery='"+ sQuery +"' all="+ searcher.indexSize() +" hits="+ docs.size() ); - for( Document doc : docs ) - { -// String sDoc = doc.toString(); - float score = searcher.getScore( doc ); - System.out.println( "score="+ score +" doc="+ doc ); - } +// for( Document doc : docs ) +// { +//// String sDoc = doc.toString(); +// float score = searcher.getScore( doc ); +// System.out.println( "score="+ score +" doc="+ doc ); +// } } @SuppressWarnings("deprecation") @@ -77,6 +78,28 @@ m_parser = new QueryParser( Version.LUCENE_43, FIELD, m_analyzer ); } + public LuceneSearcher(IndexReader indexReader) throws Exception { + m_reader = indexReader; + m_searcher = new IndexSearcher( m_reader ); + m_analyzer = new StandardAnalyzer( Version.LUCENE_43); + m_parser = new QueryParser( Version.LUCENE_43, FIELD, m_analyzer ); + } + + public LuceneSearcher(Directory directory, String seachField) throws Exception { + this.FIELD = seachField; + m_reader = DirectoryReader.open(directory); + m_searcher = new IndexSearcher( m_reader ); + m_analyzer = new StandardAnalyzer( Version.LUCENE_43); + m_parser = new QueryParser( Version.LUCENE_43, FIELD, m_analyzer ); + } + + public LuceneSearcher(String indexDirectory) throws Exception { + m_reader = DirectoryReader.open(FSDirectory.open(new File(indexDirectory))); + m_searcher = new IndexSearcher( m_reader ); + m_analyzer = new StandardAnalyzer( Version.LUCENE_43); + m_parser = new QueryParser( Version.LUCENE_43, FIELD, m_analyzer ); + } + public void close() throws Exception { m_reader.close(); } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-07-08 13:49:40 UTC (rev 4014) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -20,8 +20,13 @@ package org.dllearner.algorithms.isle; import java.util.Comparator; +import java.util.Map; import org.dllearner.algorithms.celoe.OENode; +import org.dllearner.core.Component; +import org.dllearner.core.ComponentInitException; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.owl.Entity; import org.dllearner.utilities.owl.ConceptComparator; /** @@ -31,7 +36,8 @@ * @author Jens Lehmann * */ -public class NLPHeuristic implements Comparator<OENode> { +public class NLPHeuristic implements Component, Comparator<OENode>{ + // strong penalty for long descriptions private double expansionPenaltyFactor = 0.1; // bonus for being better than parent node @@ -42,7 +48,23 @@ // syntactic comparison as final comparison criterion private ConceptComparator conceptComparator = new ConceptComparator(); + @ConfigOption(name = "startNodeBonus", 
defaultValue="0.1") + private double startNodeBonus = 0.1; + + private Map<Entity, Double> entityRelevance; + + public NLPHeuristic() {} + + public NLPHeuristic(Map<Entity,Double> entityRelevance) { + this.entityRelevance = entityRelevance; + } + @Override + public void init() throws ComponentInitException { + + } + + @Override public int compare(OENode node1, OENode node2) { // System.out.println("node1 " + node1); // System.out.println("score: " + getNodeScore(node1)); @@ -67,6 +89,9 @@ if(!node.isRoot()) { double parentAccuracy = node.getParent().getAccuracy(); score += (parentAccuracy - score) * gainBonusFactor; + // the root node also gets a bonus to possibly spawn useful disjunctions + } else { + score += startNodeBonus; } // penalty for horizontal expansion score -= node.getHorizontalExpansion() * expansionPenaltyFactor; @@ -77,6 +102,48 @@ public double getExpansionPenaltyFactor() { return expansionPenaltyFactor; + } + + public double getGainBonusFactor() { + return gainBonusFactor; + } + + public void setGainBonusFactor(double gainBonusFactor) { + this.gainBonusFactor = gainBonusFactor; + } + + public double getNodeRefinementPenalty() { + return nodeRefinementPenalty; + } + + public void setNodeRefinementPenalty(double nodeRefinementPenalty) { + this.nodeRefinementPenalty = nodeRefinementPenalty; + } + + public void setExpansionPenaltyFactor(double expansionPenaltyFactor) { + this.expansionPenaltyFactor = expansionPenaltyFactor; + } + + public double getStartNodeBonus() { + return startNodeBonus; + } + + public void setStartNodeBonus(double startNodeBonus) { + this.startNodeBonus = startNodeBonus; } + + /** + * @param entityRelevance the entityRelevance to set + */ + public void setEntityRelevance(Map<Entity, Double> entityRelevance) { + this.entityRelevance = entityRelevance; + } + + /** + * @return the entityRelevance + */ + public Map<Entity, Double> getEntityRelevance() { + return entityRelevance; + } } Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -0,0 +1,141 @@ +/** + * + */ +package org.dllearner.algorithms.isle; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.StringField; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.util.Version; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; + +import 
uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; + +/** + * @author Lorenz Buehmann + * + */ +public class OWLOntologyLuceneIndex { + + private Directory directory = new RAMDirectory(); + private OWLOntology ontology; + private Set<OWLEntity> schemaEntities; + + private OWLDataFactory df = new OWLDataFactoryImpl(); + private OWLAnnotationProperty annotationProperty = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()); + private String language = "en"; + private String searchField; + + public OWLOntologyLuceneIndex(OWLOntology ontology, String searchField) throws IOException { + this.ontology = ontology; + this.searchField = searchField; + + schemaEntities = new HashSet<OWLEntity>(); + schemaEntities.addAll(ontology.getClassesInSignature()); + schemaEntities.addAll(ontology.getObjectPropertiesInSignature()); + schemaEntities.addAll(ontology.getDataPropertiesInSignature()); + + buildIndex(); + } + + public OWLOntologyLuceneIndex(OWLOntology ontology, OWLAnnotationProperty annotationProperty) throws IOException { + this.ontology = ontology; + this.annotationProperty = annotationProperty; + + schemaEntities = new HashSet<OWLEntity>(); + schemaEntities.addAll(ontology.getClassesInSignature()); + schemaEntities.addAll(ontology.getObjectPropertiesInSignature()); + schemaEntities.addAll(ontology.getDataPropertiesInSignature()); + + buildIndex(); + } + + /** + * @return the ontology + */ + public OWLOntology getOntology() { + return ontology; + } + + /** + * @return the directory + */ + public Directory getDirectory() { + return directory; + } + + /** + * @param annotationProperty the annotationProperty to set + */ + public void setAnnotationProperty(OWLAnnotationProperty annotationProperty) { + this.annotationProperty = annotationProperty; + } + + /** + * @param annotationProperty the annotationProperty to set + */ + public void setAnnotationProperty(String annotationPropertyIRI) { + this.annotationProperty = df.getOWLAnnotationProperty(IRI.create(annotationPropertyIRI)); + } + + public void buildIndex() throws IOException{ + Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_43); + IndexWriterConfig indexWriterConfig = new IndexWriterConfig(Version.LUCENE_43, analyzer); + IndexWriter writer = new IndexWriter(directory, indexWriterConfig); + System.out.println( "Creating index ..." 
); + + Set<Document> luceneDocuments = new HashSet<Document>(); + FieldType stringType = new FieldType(StringField.TYPE_STORED); + stringType.setStoreTermVectors(false); + FieldType textType = new FieldType(TextField.TYPE_STORED); + textType.setStoreTermVectors(false); + + for (OWLEntity entity : schemaEntities) { + String label = null; + Set<OWLAnnotation> annotations = entity.getAnnotations(ontology, annotationProperty); + for (OWLAnnotation annotation : annotations) { + if (annotation.getValue() instanceof OWLLiteral) { + OWLLiteral val = (OWLLiteral) annotation.getValue(); + if (val.hasLang(language)) { + label = val.getLiteral(); + } + } + } + + if(label != null){ + Document luceneDocument = new Document(); + luceneDocument.add(new Field("uri", entity.toStringID(), stringType)); + luceneDocument.add(new Field(searchField, label, textType)); + luceneDocuments.add(luceneDocument); + } + + } + writer.addDocuments(luceneDocuments); + + System.out.println("Done."); + writer.close(); + } + + + +} Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMILuceneBasedRelevance.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMILuceneBasedRelevance.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMILuceneBasedRelevance.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -0,0 +1,48 @@ +/** + * Copyright (C) 2007-2011, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + + +package org.dllearner.algorithms.isle; + +import org.semanticweb.owlapi.model.OWLOntology; + + +public class PMILuceneBasedRelevance extends LuceneBasedRelevance{ + + /** + * @param ontology + * @param searcher + * @param textRetriever + */ + public PMILuceneBasedRelevance(OWLOntology ontology, LuceneSearcher searcher, EntityTextRetriever textRetriever) { + super(ontology, searcher, textRetriever); + + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.isle.LuceneBasedRelevance#computeScore(int, int, int, int) + */ + @Override + public double computeScore(int nrOfDocuments, int nrOfDocumentsA, int nrOfDocumentsB, int nrOfDocumentsAB) { + double dPClass = nrOfDocuments == 0 ? 0 : ((double) nrOfDocumentsA / (double) nrOfDocuments); + double dPClassEntity = nrOfDocumentsB == 0 ? 
0 : (double) nrOfDocumentsAB / (double) nrOfDocumentsB; + double pmi = Math.log(dPClassEntity / dPClass); + return pmi; + } +} \ No newline at end of file Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -0,0 +1,108 @@ +package org.dllearner.algorithms.isle; + + +import java.io.*; +import java.util.*; + +public class PMIRelevance { + + private LuceneSearcher m_searcher = null; + + private Set<String> m_classes; + private Set<String> m_individuals; + + + public static void main( String args[] ) throws Exception { + PMIRelevance relevance = new PMIRelevance( args[0], args[1] ); + relevance.printScores(); + } + + public void printScores() throws Exception { + for( String sInd: m_individuals ) + { + Map<String,Double> hmClass2Score = getClassRelevance( sInd ); + for( String sClass : hmClass2Score.keySet() ) + { + double dScore = hmClass2Score.get( sClass ); + if( dScore > 0 ){ + System.out.println( "PMI( "+ sInd +" , "+ sClass +" ) = "+ dScore ); + } + } + } + /* for( String sClass: m_classes ) + { + Map<String,Double> hmInd2Score = getIndividualRelevance( sClass ); + for( String sInd : hmInd2Score.keySet() ) + { + double dScore = hmInd2Score.get( sInd ); + if( dScore > 0 ){ + System.out.println( "P( "+ sClass +" | "+ sInd +" ) = "+ dScore ); + } + } + } */ + m_searcher.close(); + } + + public PMIRelevance( String sClasses, String sIndividuals ) throws Exception { + m_searcher = new LuceneSearcher(); + m_classes = read( sClasses ); + m_individuals = read( sIndividuals ); + } + + public Map<String,Double> getClassRelevance( String sIndividual ) throws Exception { + // computes relevance of classes for this individual + // conditional probability: P(I|C)=f(I,C)/f(C) + // PMI(I,C)=log( P(I|C) / P(I) ) + Map<String,Double> hmClass2Score = new HashMap<String,Double>(); + int iInd = m_searcher.count( sIndividual ); + int iAll = m_searcher.indexSize(); + double dPInd = (double) iInd / (double) iAll; + for( String sClass: m_classes ) + { + int iClass = m_searcher.count( sClass ); + int iIndClass = m_searcher.count( sIndividual +" AND "+ sClass ); + double dPIndClass = (double) iIndClass / (double)iClass; + double dPMI = Math.log( dPIndClass / dPInd ); + hmClass2Score.put( sClass, dPMI ); + } + return hmClass2Score; + } + + public Map<String,Double> getIndividualRelevance( String sClass ) throws Exception { + // computes relevance of individuals for this class + // conditional probability: P(C|I)=f(C,I)/f(I) + // PMI(C|I)=log( P(C|I) / P(C) ) + Map<String,Double> hmInd2Score = new HashMap<String,Double>(); + int iClass = m_searcher.count( sClass ); + int iAll = m_searcher.indexSize(); + double dPClass = (double) iClass / (double) iAll; + for( String sInd: m_individuals ) + { + int iInd = m_searcher.count( sInd ); + int iIndClass = m_searcher.count( sClass +" AND "+ sInd ); + double dPClassInd = (double) iIndClass / (double)iInd; + double dPMI = Math.log( dPClassInd / dPClass ); + hmInd2Score.put( sInd, dPMI ); + } + return hmInd2Score; + } + + private static Set<String> read( String sFile ) throws Exception { + File file = new File( sFile ); + Set<String> lines = new HashSet<String>(); + BufferedReader reader = null; + try { + reader = new BufferedReader( new FileReader( file 
) ); + String sLine = null; + while( ( sLine = reader.readLine() ) != null ) { + lines.add( sLine.trim() ); + } + } + finally { + if( reader != null ) { + reader.close(); + } + } + return lines; + } +} \ No newline at end of file Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java 2013-07-08 13:51:31 UTC (rev 4015) @@ -0,0 +1,165 @@ +/** + * Copyright (C) 2007-2011, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + + +package org.dllearner.algorithms.isle; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLNamedObject; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.O... [truncated message content] |
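The central technical piece of revision 4015 above is the PMI-based relevance score: computeScore in PMILuceneBasedRelevance estimates P(A) from the hit count of one entity's text over the whole index and P(A|B) from the co-occurrence count with the other entity's text, and returns the logarithm of their ratio. The following standalone sketch is not part of the commit; it reproduces the same formula with hypothetical document counts to make the probability estimates explicit, and the class name and all numbers are illustrative only.

// Standalone sketch of the PMI score used in PMILuceneBasedRelevance.computeScore (rev 4015).
// All document counts below are hypothetical.
public class PmiSketch {

    // PMI(A,B) = log( P(A|B) / P(A) ), estimated from document counts:
    //   P(A)   = f(A)   / N
    //   P(A|B) = f(A,B) / f(B)
    static double pmi(int nrOfDocuments, int nrOfDocumentsA, int nrOfDocumentsB, int nrOfDocumentsAB) {
        double pClass = nrOfDocuments == 0 ? 0 : (double) nrOfDocumentsA / nrOfDocuments;
        double pClassGivenEntity = nrOfDocumentsB == 0 ? 0 : (double) nrOfDocumentsAB / nrOfDocumentsB;
        return Math.log(pClassGivenEntity / pClass);
    }

    public static void main(String[] args) {
        // hypothetical counts: 10000 indexed documents, text A occurs in 800 of them,
        // text B in 2000, both together in 600
        double score = pmi(10000, 800, 2000, 600);
        // (600/2000) / (800/10000) = 0.3 / 0.08 = 3.75, ln(3.75) is roughly 1.32
        System.out.println("PMI = " + score);
    }
}

As in the commit, a NaN score (which arises when both estimated probabilities are zero) is filtered out by the caller in getEntityRelevance rather than inside the score function itself.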
From: <lor...@us...> - 2013-07-11 12:23:39

Revision: 4016 http://sourceforge.net/p/dl-learner/code/4016 Author: lorenz_b Date: 2013-07-11 12:23:36 +0000 (Thu, 11 Jul 2013) Log Message: ----------- Some work on ISLE. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIConverter.java trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-07-08 13:51:31 UTC (rev 4015) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-07-11 12:23:36 UTC (rev 4016) @@ -20,14 +20,20 @@ package org.dllearner.algorithms.isle; import java.util.Comparator; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import org.dllearner.algorithms.celoe.OENode; import org.dllearner.core.Component; import org.dllearner.core.ComponentInitException; import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Entity; import org.dllearner.utilities.owl.ConceptComparator; +import org.dllearner.utilities.owl.OWLAPIConverter; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLEntity; /** * @@ -51,6 +57,8 @@ @ConfigOption(name = "startNodeBonus", defaultValue="0.1") private double startNodeBonus = 0.1; + private double nlpBonusFactor = 0.0001; + private Map<Entity, Double> entityRelevance; public NLPHeuristic() {} @@ -97,6 +105,21 @@ score -= node.getHorizontalExpansion() * expansionPenaltyFactor; // penalty for having many child nodes (stuck prevention) score -= node.getRefinementCount() * nodeRefinementPenalty; + + + //the NLP based scoring + Description expression = node.getExpression(); + OWLClassExpression owlapiDescription = OWLAPIConverter.getOWLAPIDescription(expression); + Set<Entity> entities = OWLAPIConverter.getEntities(owlapiDescription.getSignature()); + double sum = 0; + for (Entity entity : entities) { + double relevance = entityRelevance.containsKey(entity) ? entityRelevance.get(entity) : 0; + if(!Double.isInfinite(relevance)){ + sum += relevance; + } + } + score += nlpBonusFactor * sum; + return score; } Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java 2013-07-08 13:51:31 UTC (rev 4015) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java 2013-07-11 12:23:36 UTC (rev 4016) @@ -268,7 +268,7 @@ private Model execModel(Model model) { HttpQuery httpQuery = makeHttpQuery() ; - httpQuery.setAccept(WebContent.contentTypeTurtleAlt1) ; + httpQuery.setAccept(WebContent.contentTypeNTriplesAlt) ; InputStream in = httpQuery.exec() ; //Don't assume the endpoint actually gives back the content type we asked for @@ -284,7 +284,7 @@ //Try to select language appropriately here based on the model content type Lang lang = WebContent.contentTypeToLang(actualContentType); if (! 
RDFLanguages.isTriples(lang)) throw new QueryException("Endpoint returned Content Type: " + actualContentType + " which is not a valid RDF Graph syntax"); - model.read(in, null, "TURTLE") ; + model.read(in, null, Lang.NTRIPLES.getName()) ; return model ; } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIConverter.java 2013-07-08 13:51:31 UTC (rev 4015) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAPIConverter.java 2013-07-11 12:23:36 UTC (rev 4016) @@ -19,6 +19,7 @@ package org.dllearner.utilities.owl; +import java.util.HashSet; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -152,6 +153,22 @@ throw new Error("OWL API entity conversion for " + entity + " not supported."); } + public static Set<Entity> getEntities(Set<OWLEntity> owlEntities) { + Set<Entity> entities = new HashSet<Entity>(); + for (OWLEntity entity : owlEntities) { + if(entity instanceof OWLObjectProperty) { + entities.add(convertObjectProperty((OWLObjectProperty) entity)); + } else if(entity instanceof OWLDataProperty) { + entities.add(convertDatatypeProperty((OWLDataProperty) entity)); + } else if(entity instanceof OWLClass) { + entities.add(new NamedClass(entity.toStringID())); + } else if(entity instanceof OWLNamedIndividual) { + entities.add(convertIndividual((OWLNamedIndividual) entity)); + } + } + return entities; + } + public static Description convertClass(OWLClass owlClass) { if(owlClass.isOWLThing()) { return Thing.instance; Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-07-08 13:51:31 UTC (rev 4015) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-07-11 12:23:36 UTC (rev 4016) @@ -69,7 +69,7 @@ } @Test - public void testISLE() throws ComponentInitException { + public void testISLE() throws Exception { KnowledgeSource ks = new OWLAPIOntology(ontology); AbstractReasonerComponent reasoner = new FastInstanceChecker(ks); reasoner.init(); @@ -78,7 +78,11 @@ lp.setClassToDescribe(cls); lp.init(); + Map<Entity, Double> entityRelevance = relevance.getEntityRelevance(cls); + NLPHeuristic heuristic = new NLPHeuristic(entityRelevance); + ISLE isle = new ISLE(lp, reasoner); + isle.setHeuristic(heuristic); isle.init(); isle.start(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
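Revision 4016 above is where the relevance scores actually influence the search: NLPHeuristic.getNodeScore now collects all entities in the signature of a node's class expression, sums their relevance values, and adds that sum, weighted by nlpBonusFactor, to the node score. The snippet below is a standalone sketch of just this bonus term, using plain strings in place of DL-Learner's Entity objects; the entity names and relevance values are made up for illustration.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Standalone sketch of the NLP bonus added to the node score in NLPHeuristic (rev 4016).
// Entities are represented as plain strings here; names and values are hypothetical.
public class NlpBonusSketch {

    // same default value as the nlpBonusFactor field introduced in the commit
    static final double NLP_BONUS_FACTOR = 0.0001;

    static double applyNlpBonus(double score, Set<String> signature, Map<String, Double> entityRelevance) {
        double sum = 0;
        for (String entity : signature) {
            // unknown entities count as 0 and infinite relevance values are skipped,
            // mirroring the checks in getNodeScore
            double relevance = entityRelevance.containsKey(entity) ? entityRelevance.get(entity) : 0;
            if (!Double.isInfinite(relevance)) {
                sum += relevance;
            }
        }
        return score + NLP_BONUS_FACTOR * sum;
    }

    public static void main(String[] args) {
        Map<String, Double> relevance = new HashMap<String, Double>();
        relevance.put("http://example.com/father#male", 1.3);
        relevance.put("http://example.com/father#hasChild", 0.7);

        Set<String> signature = new HashSet<String>(relevance.keySet());

        // 0.8 + 0.0001 * (1.3 + 0.7) = 0.8002
        System.out.println(applyNlpBonus(0.8, signature, relevance));
    }
}

Since the default factor is small compared to accuracy and the expansion penalty, the relevance information appears to be intended as a tie-breaker between otherwise similar nodes rather than as a dominant term.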
From: <lor...@us...> - 2013-07-12 09:03:25
Revision: 4018 http://sourceforge.net/p/dl-learner/code/4018 Author: lorenz_b Date: 2013-07-12 09:03:21 +0000 (Fri, 12 Jul 2013) Log Message: ----------- Cont. ISLE. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java trunk/components-core/src/main/java/org/dllearner/core/owl/Entity.java trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/AnnotationEntityTextRetriever.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/EntityExtraction.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSCommentEntityTextRetriever.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSLabelEntityTextRetriever.java Removed Paths: ------------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/AnnotationEntityTextRetriever.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/AnnotationEntityTextRetriever.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/AnnotationEntityTextRetriever.java 2013-07-12 09:03:21 UTC (rev 4018) @@ -0,0 +1,93 @@ +/** + * + */ +package org.dllearner.algorithms.isle; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.dllearner.core.owl.Entity; +import org.dllearner.kb.OWLAPIOntology; +import org.dllearner.utilities.owl.OWLAPIConverter; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.util.IRIShortFormProvider; +import org.semanticweb.owlapi.util.SimpleIRIShortFormProvider; + + +/** + * @author Lorenz Buehmann + * + */ +public class AnnotationEntityTextRetriever implements EntityTextRetriever{ + + private OWLOntology ontology; + private OWLOntologyManager manager; + + private String language = "en"; + private double weight = 1d; + + private boolean useShortFormFallback = true; + private IRIShortFormProvider sfp = new SimpleIRIShortFormProvider(); + + private OWLAnnotationProperty[] properties; + + public AnnotationEntityTextRetriever(OWLOntology ontology, OWLAnnotationProperty... properties) { + this.ontology = ontology; + this.properties = properties; + } + + public AnnotationEntityTextRetriever(OWLAPIOntology ontology, OWLAnnotationProperty... properties) { + this.ontology = ontology.createOWLOntology(manager); + } + + /** + * @param language the language to set + */ + public void setLanguage(String language) { + this.language = language; + } + + /** + * Whether to use the short form of the IRI as fallback, if no label is given. 
+ * @param useShortFormFallback the useShortFormFallback to set + */ + public void setUseShortFormFallback(boolean useShortFormFallback) { + this.useShortFormFallback = useShortFormFallback; + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.isle.EntityTextRetriever#getRelevantText(org.dllearner.core.owl.Entity) + */ + @Override + public Map<String, Double> getRelevantText(Entity entity) { + Map<String, Double> textWithWeight = new HashMap<String, Double>(); + + OWLEntity e = OWLAPIConverter.getOWLAPIEntity(entity); + + for (OWLAnnotationProperty property : properties) { + Set<OWLAnnotation> annotations = e.getAnnotations(ontology, property); + for (OWLAnnotation annotation : annotations) { + if (annotation.getValue() instanceof OWLLiteral) { + OWLLiteral val = (OWLLiteral) annotation.getValue(); + if (val.hasLang(language)) { + String label = val.getLiteral(); + textWithWeight.put(label, weight); + } + } + } + } + + if(textWithWeight.isEmpty() && useShortFormFallback){ + textWithWeight.put(sfp.getShortForm(IRI.create(entity.getURI())), weight); + } + + return textWithWeight; + } +} Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/EntityExtraction.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/EntityExtraction.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/EntityExtraction.java 2013-07-12 09:03:21 UTC (rev 4018) @@ -0,0 +1,28 @@ +/** + * + */ +package org.dllearner.algorithms.isle; + +import java.util.Map; + +import org.dllearner.core.owl.Entity; + +/** + * @author Lorenz Buehmann + * + */ +public interface EntityExtraction { + + /** + * Extracts all entities contained in the working text with some confidence value. + * @return + */ + Map<Entity, Double> extractEntities(); + + /** + * Extracts all entities of the given <code>type</code> contained in the working text with some confidence value. 
+ * @return + */ + Map<Entity, Double> extractEntities(Entity.Type type); + +} Deleted: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java 2013-07-11 12:24:23 UTC (rev 4017) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/LabelEntityTextRetriever.java 2013-07-12 09:03:21 UTC (rev 4018) @@ -1,95 +0,0 @@ -/** - * - */ -package org.dllearner.algorithms.isle; - -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -import org.dllearner.core.owl.Entity; -import org.dllearner.kb.OWLAPIOntology; -import org.dllearner.utilities.owl.OWLAPIConverter; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLEntity; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.util.IRIShortFormProvider; -import org.semanticweb.owlapi.util.SimpleIRIShortFormProvider; -import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; - -import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; - - -/** - * @author Lorenz Buehmann - * - */ -public class LabelEntityTextRetriever implements EntityTextRetriever{ - - private OWLOntology ontology; - private OWLOntologyManager manager; - private OWLDataFactory df = new OWLDataFactoryImpl(); - - private OWLAnnotationProperty label = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()); - - private String language = "en"; - private double weight = 1d; - - private boolean useShortFormFallback = true; - private IRIShortFormProvider sfp = new SimpleIRIShortFormProvider(); - - public LabelEntityTextRetriever(OWLOntology ontology) { - this.ontology = ontology; - } - - public LabelEntityTextRetriever(OWLAPIOntology ontology) { - this.ontology = ontology.createOWLOntology(manager); - } - - /** - * @param language the language to set - */ - public void setLanguage(String language) { - this.language = language; - } - - /** - * Whether to use the short form of the IRI as fallback, if no label is given. 
- * @param useShortFormFallback the useShortFormFallback to set - */ - public void setUseShortFormFallback(boolean useShortFormFallback) { - this.useShortFormFallback = useShortFormFallback; - } - - /* (non-Javadoc) - * @see org.dllearner.algorithms.isle.EntityTextRetriever#getRelevantText(org.dllearner.core.owl.Entity) - */ - @Override - public Map<String, Double> getRelevantText(Entity entity) { - Map<String, Double> textWithWeight = new HashMap<String, Double>(); - - OWLEntity e = OWLAPIConverter.getOWLAPIEntity(entity); - - Set<OWLAnnotation> annotations = e.getAnnotations(ontology, label); - for (OWLAnnotation annotation : annotations) { - if (annotation.getValue() instanceof OWLLiteral) { - OWLLiteral val = (OWLLiteral) annotation.getValue(); - if (val.hasLang(language)) { - String label = val.getLiteral(); - textWithWeight.put(label, weight); - } - } - } - - if(textWithWeight.isEmpty() && useShortFormFallback){ - textWithWeight.put(sfp.getShortForm(IRI.create(entity.getURI())), weight); - } - - return textWithWeight; - } -} Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java 2013-07-11 12:24:23 UTC (rev 4017) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/OWLOntologyLuceneIndex.java 2013-07-12 09:03:21 UTC (rev 4018) @@ -32,6 +32,7 @@ import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; /** + * Creates a Lucene Index for the labels if classes and properties. * @author Lorenz Buehmann * */ Deleted: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java 2013-07-11 12:24:23 UTC (rev 4017) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevance.java 2013-07-12 09:03:21 UTC (rev 4018) @@ -1,108 +0,0 @@ -package org.dllearner.algorithms.isle; - - -import java.io.*; -import java.util.*; - -public class PMIRelevance { - - private LuceneSearcher m_searcher = null; - - private Set<String> m_classes; - private Set<String> m_individuals; - - - public static void main( String args[] ) throws Exception { - PMIRelevance relevance = new PMIRelevance( args[0], args[1] ); - relevance.printScores(); - } - - public void printScores() throws Exception { - for( String sInd: m_individuals ) - { - Map<String,Double> hmClass2Score = getClassRelevance( sInd ); - for( String sClass : hmClass2Score.keySet() ) - { - double dScore = hmClass2Score.get( sClass ); - if( dScore > 0 ){ - System.out.println( "PMI( "+ sInd +" , "+ sClass +" ) = "+ dScore ); - } - } - } - /* for( String sClass: m_classes ) - { - Map<String,Double> hmInd2Score = getIndividualRelevance( sClass ); - for( String sInd : hmInd2Score.keySet() ) - { - double dScore = hmInd2Score.get( sInd ); - if( dScore > 0 ){ - System.out.println( "P( "+ sClass +" | "+ sInd +" ) = "+ dScore ); - } - } - } */ - m_searcher.close(); - } - - public PMIRelevance( String sClasses, String sIndividuals ) throws Exception { - m_searcher = new LuceneSearcher(); - m_classes = read( sClasses ); - m_individuals = read( sIndividuals ); - } - - public Map<String,Double> getClassRelevance( String sIndividual ) throws Exception { - // computes relevance of classes for this individual - // conditional probability: 
P(I|C)=f(I,C)/f(C) - // PMI(I,C)=log( P(I|C) / P(I) ) - Map<String,Double> hmClass2Score = new HashMap<String,Double>(); - int iInd = m_searcher.count( sIndividual ); - int iAll = m_searcher.indexSize(); - double dPInd = (double) iInd / (double) iAll; - for( String sClass: m_classes ) - { - int iClass = m_searcher.count( sClass ); - int iIndClass = m_searcher.count( sIndividual +" AND "+ sClass ); - double dPIndClass = (double) iIndClass / (double)iClass; - double dPMI = Math.log( dPIndClass / dPInd ); - hmClass2Score.put( sClass, dPMI ); - } - return hmClass2Score; - } - - public Map<String,Double> getIndividualRelevance( String sClass ) throws Exception { - // computes relevance of individuals for this class - // conditional probability: P(C|I)=f(C,I)/f(I) - // PMI(C|I)=log( P(C|I) / P(C) ) - Map<String,Double> hmInd2Score = new HashMap<String,Double>(); - int iClass = m_searcher.count( sClass ); - int iAll = m_searcher.indexSize(); - double dPClass = (double) iClass / (double) iAll; - for( String sInd: m_individuals ) - { - int iInd = m_searcher.count( sInd ); - int iIndClass = m_searcher.count( sClass +" AND "+ sInd ); - double dPClassInd = (double) iIndClass / (double)iInd; - double dPMI = Math.log( dPClassInd / dPClass ); - hmInd2Score.put( sInd, dPMI ); - } - return hmInd2Score; - } - - private static Set<String> read( String sFile ) throws Exception { - File file = new File( sFile ); - Set<String> lines = new HashSet<String>(); - BufferedReader reader = null; - try { - reader = new BufferedReader( new FileReader( file ) ); - String sLine = null; - while( ( sLine = reader.readLine() ) != null ) { - lines.add( sLine.trim() ); - } - } - finally { - if( reader != null ) { - reader.close(); - } - } - return lines; - } -} \ No newline at end of file Deleted: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java 2013-07-11 12:24:23 UTC (rev 4017) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/PMIRelevances.java 2013-07-12 09:03:21 UTC (rev 4018) @@ -1,165 +0,0 @@ -/** - * Copyright (C) 2007-2011, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- */
-
-
-package org.dllearner.algorithms.isle;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-import org.semanticweb.owlapi.apibinding.OWLManager;
-import org.semanticweb.owlapi.model.IRI;
-import org.semanticweb.owlapi.model.OWLClass;
-import org.semanticweb.owlapi.model.OWLEntity;
-import org.semanticweb.owlapi.model.OWLNamedObject;
-import org.semanticweb.owlapi.model.OWLOntology;
-import org.semanticweb.owlapi.model.OWLOntologyManager;
-
-
-public class PMIRelevances {
-
-    private LuceneSearcher m_searcher = null;
-
-    private OWLOntologyManager m_manager;
-    private OWLOntology m_ontology;
-
-    private Set<OWLEntity> m_entities;
-    private Set<OWLClass> m_classes;
-
-
-    public static void main( String args[] ) throws Exception {
-        PMIRelevances relevances = new PMIRelevances( args[0] );
-        relevances.printScores();
-    }
-
-    public void printScores() throws Exception {
-        for( OWLClass c: m_classes )
-        {
-            Map<OWLEntity,Double> hmEntity2Score = getEntityRelevance(c);
-            // normalization per class?
-            hmEntity2Score = normalize( hmEntity2Score );
-            for( OWLEntity e : hmEntity2Score.keySet() )
-            {
-                double dScore = hmEntity2Score.get(e);
-                System.out.println( "P( "+ getLabel(c) +", "+ getLabel(e) +" ) = "+ dScore );
-            }
-        }
-        m_searcher.close();
-    }
-
-    public PMIRelevances( String sOntologyURI ) throws Exception {
-        m_searcher = new LuceneSearcher();
-        loadOntology( sOntologyURI );
-    }
-
-    public Map<OWLEntity,Double> normalize( Map<OWLEntity,Double> hmEntity2Score ){
-        Map<OWLEntity,Double> hmEntity2Norm = new HashMap<OWLEntity,Double>();
-        double dMin = Double.MAX_VALUE;
-        Double dMax = Double.MIN_VALUE;
-        for( OWLEntity e : hmEntity2Score.keySet() )
-        {
-            double dValue = hmEntity2Score.get(e);
-            if( dValue < dMin ){
-                dMin = dValue;
-            }
-            else if( dValue > dMax ){
-                dMax = dValue;
-            }
-        }
-        // System.out.println( "min="+ dMin +" max="+ dMax );
-        for( OWLEntity e : hmEntity2Score.keySet() )
-        {
-            double dValue = hmEntity2Score.get(e);
-            double dNorm = 0;
-            if( dMin == dMax ){
-                dNorm = dValue;
-            }
-            else {
-                dNorm = ( dValue - dMin ) / ( dMax - dMin );
-            }
-            hmEntity2Norm.put( e, dNorm );
-        }
-        return hmEntity2Norm;
-    }
-
-    public Map<OWLEntity,Double> getEntityRelevance( OWLClass c ) throws Exception {
-        // computes relevance of entity for this class
-        // conditional probability: P(C,E)=f(C,E)/f(E)
-        // PMI(C,E)=log( P(C,E) / P(C) )
-        Map<OWLEntity,Double> hmEntity2Score = new HashMap<OWLEntity,Double>();
-        String sClass = getLabel(c);
-        int iClass = m_searcher.count( sClass );
-        int iAll = m_searcher.indexSize();
-        double dPClass = (double) iClass / (double) iAll;
-        for( OWLEntity e: m_entities )
-        {
-            String sEntity = getLabel(e);
-            int iEntity = m_searcher.count( sEntity );
-            int iEntityClass = m_searcher.count( sClass +" AND "+ sEntity );
-//            double dPEntity = (double)iEntity / (double)iAll;
-            double dPClassEntity = (double) iEntityClass / (double)iEntity;
-            double dPMI = Math.log( dPClassEntity / dPClass );
-            if( !Double.isNaN( dPMI ) && !Double.isInfinite( dPMI ) ){
-                hmEntity2Score.put( e, dPMI );
-            }
-        }
-        return hmEntity2Score;
-    }
-
-    /* private String getLabel( OWLEntity e ){
-        System.out.println( "getLabel: "+ e );
-        OWLDataFactory factory = m_manager.getOWLDataFactory();
-        OWLAnnotationProperty label = factory.getOWLAnnotationProperty( OWLRDFVocabulary.RDFS_LABEL.getIRI() );
-        Set<OWLAnnotation> anns = e.getAnnotations( m_ontology, label );
-        for( OWLAnnotation annotation: anns )
-        {
-            System.out.println( "annotation="+ annotation );
-            if( annotation.getValue() instanceof OWLLiteral )
-            {
-                OWLLiteral val = (OWLLiteral) annotation.getValue();
-                if( !val.isOWLTypedLiteral() ){
-                    if (val.asOWLStringLiteral().getLang().equals("en")) {
-                        return val.getLiteral();
-                    }
-                }
-                return val.getLiteral();
-            }
-        }
-        return null;
-    } */
-
-    private String getLabel( OWLEntity e ){
-        if( e instanceof OWLNamedObject ){
-            String sIRI = ((OWLNamedObject)e).getIRI().toString();
-            return sIRI.substring( sIRI.indexOf( "#" )+1 );
-        }
-        return null;
-    }
-
-    private void loadOntology( String sOntologyURI ) throws Exception {
-        m_manager = OWLManager.createOWLOntologyManager();
-        IRI ontologyIRI = IRI.create( sOntologyURI );
-        m_ontology = m_manager.loadOntology( ontologyIRI );
-        m_classes = m_ontology.getClassesInSignature();
-        m_entities = m_ontology.getSignature();
-        System.out.println( "classes="+ m_classes.size() +" entities="+ m_entities.size() );
-        // m_manager.removeOntology( ontology );
-    }
-}
\ No newline at end of file

Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSCommentEntityTextRetriever.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSCommentEntityTextRetriever.java                        (rev 0)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSCommentEntityTextRetriever.java 2013-07-12 09:03:21 UTC (rev 4018)
@@ -0,0 +1,26 @@
+/**
+ *
+ */
+package org.dllearner.algorithms.isle;
+
+import org.dllearner.kb.OWLAPIOntology;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.vocab.OWLRDFVocabulary;
+
+import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl;
+
+
+/**
+ * @author Lorenz Buehmann
+ *
+ */
+public class RDFSCommentEntityTextRetriever extends AnnotationEntityTextRetriever{
+
+    public RDFSCommentEntityTextRetriever(OWLOntology ontology) {
+        super(ontology, new OWLDataFactoryImpl().getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_COMMENT.getIRI()));
+    }
+
+    public RDFSCommentEntityTextRetriever(OWLAPIOntology ontology) {
+        super(ontology, new OWLDataFactoryImpl().getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_COMMENT.getIRI()));
+    }
+}

Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSLabelEntityTextRetriever.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSLabelEntityTextRetriever.java                        (rev 0)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/RDFSLabelEntityTextRetriever.java 2013-07-12 09:03:21 UTC (rev 4018)
@@ -0,0 +1,26 @@
+/**
+ *
+ */
+package org.dllearner.algorithms.isle;
+
+import org.dllearner.kb.OWLAPIOntology;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.vocab.OWLRDFVocabulary;
+
+import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl;
+
+
+/**
+ * @author Lorenz Buehmann
+ *
+ */
+public class RDFSLabelEntityTextRetriever extends AnnotationEntityTextRetriever{
+
+    public RDFSLabelEntityTextRetriever(OWLOntology ontology) {
+        super(ontology, new OWLDataFactoryImpl().getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()));
+    }
+
+    public RDFSLabelEntityTextRetriever(OWLAPIOntology ontology) {
+        super(ontology, new OWLDataFactoryImpl().getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()));
+    }
+}

Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/Entity.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/core/owl/Entity.java 2013-07-11 12:24:23 UTC (rev 4017)
+++ trunk/components-core/src/main/java/org/dllearner/core/owl/Entity.java 2013-07-12 09:03:21 UTC (rev 4018)
@@ -28,6 +28,10 @@
  *
  */
 public interface Entity extends NamedKBElement {
+
+    public enum Type{
+        CLASS, OBJECT_PROPERTY, DATA_PROPERTY;
+    }

     public URI getURI();

Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java
===================================================================
--- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-07-11 12:24:23 UTC (rev 4017)
+++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-07-12 09:03:21 UTC (rev 4018)
@@ -3,15 +3,10 @@
  */
 package org.dllearner.algorithms.isle;

-import static org.junit.Assert.*;
-
 import java.io.File;
 import java.util.Map;
-import java.util.Map.Entry;

-import org.dllearner.core.AbstractLearningProblem;
 import org.dllearner.core.AbstractReasonerComponent;
-import org.dllearner.core.ComponentInitException;
 import org.dllearner.core.KnowledgeSource;
 import org.dllearner.core.owl.Entity;
 import org.dllearner.core.owl.NamedClass;
@@ -39,21 +34,28 @@
     private LuceneSearcher searcher;
     private Relevance relevance;
     private String searchField = "label";
-
+
     /**
-     * @throws java.lang.Exception
+     *
      */
-    @Before
-    public void setUp() throws Exception {
+    public ISLETest() throws Exception{
         manager = OWLManager.createOWLOntologyManager();
         ontology = manager.loadOntologyFromOntologyDocument(new File("../examples/isle/father_labeled.owl"));
         cls = new NamedClass("http://example.com/father#father");
-        textRetriever = new LabelEntityTextRetriever(ontology);
+        textRetriever = new RDFSLabelEntityTextRetriever(ontology);
         OWLOntologyLuceneIndex index = new OWLOntologyLuceneIndex(ontology, searchField);
         searcher = new LuceneSearcher(index.getDirectory(), searchField);
         relevance = new PMILuceneBasedRelevance(ontology, searcher, textRetriever);
     }

+    /**
+     * @throws java.lang.Exception
+     */
+    @Before
+    public void setUp() throws Exception{
+
+    }
+
     @Test
     public void testTextRetrieval() {
         System.out.println("Text for entity " + cls + ":");

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
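For readers following the relevance computation in the PMIRelevances class removed above: the score is plain pointwise mutual information over Lucene hit counts, with P(C) = f(C)/N, P(C|E) = f(C,E)/f(E) and PMI(C,E) = log(P(C|E)/P(C)); NaN and infinite values are dropped and the remaining scores are min-max normalised per class. The following is a minimal, self-contained sketch of that arithmetic. The class name, method names and the counts are invented for illustration and are not part of the DL-Learner API.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: the PMI and min-max normalisation from PMIRelevances,
// written against plain integer counts instead of a LuceneSearcher.
public class PmiSketch {

    // PMI(C,E) = log( (f(C,E)/f(E)) / (f(C)/N) ), i.e. observed vs. expected association of class C and entity E.
    static double pmi(int fClass, int fEntity, int fBoth, int indexSize) {
        double pClass = (double) fClass / indexSize;          // P(C)
        double pClassGivenEntity = (double) fBoth / fEntity;  // P(C|E), called P(C,E) in the original comment
        return Math.log(pClassGivenEntity / pClass);          // NaN/infinite for zero counts; the original filters these out
    }

    // Min-max normalisation to [0,1], as done per class in PMIRelevances.normalize().
    static Map<String, Double> normalize(Map<String, Double> scores) {
        double min = Collections.min(scores.values());
        double max = Collections.max(scores.values());
        Map<String, Double> norm = new HashMap<String, Double>();
        for (Map.Entry<String, Double> e : scores.entrySet()) {
            norm.put(e.getKey(), min == max ? e.getValue() : (e.getValue() - min) / (max - min));
        }
        return norm;
    }

    public static void main(String[] args) {
        // Toy numbers: "father" occurs in 80 of 10000 documents, "male" in 400, both together in 60.
        // log((60/400) / (80/10000)) = log(18.75), roughly 2.93, i.e. a strong positive association.
        System.out.println(pmi(80, 400, 60, 10000));
    }
}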
From: <lor...@us...> - 2013-07-29 08:12:38
|
Revision: 4022
          http://sourceforge.net/p/dl-learner/code/4022
Author:   lorenz_b
Date:     2013-07-29 08:12:30 +0000 (Mon, 29 Jul 2013)
Log Message:
-----------
Cont. ISLE

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/LuceneSyntacticIndex.java
    trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/SimpleSemanticIndex.java
    trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java
    trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java
    trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java
    trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/LuceneSyntacticIndex.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/LuceneSyntacticIndex.java 2013-07-17 11:44:41 UTC (rev 4021)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/LuceneSyntacticIndex.java 2013-07-29 08:12:30 UTC (rev 4022)
@@ -67,7 +67,7 @@
         } catch (IOException e) {
             e.printStackTrace();
         }
-        return null;
+        return documents;
     }

     /* (non-Javadoc)

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/SimpleSemanticIndex.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/SimpleSemanticIndex.java 2013-07-17 11:44:41 UTC (rev 4021)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/SimpleSemanticIndex.java 2013-07-29 08:12:30 UTC (rev 4022)
@@ -3,9 +3,14 @@
  */
 package org.dllearner.algorithms.isle.index;

+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;

+import org.dllearner.algorithms.isle.textretrieval.RDFSLabelEntityTextRetriever;
 import org.dllearner.core.owl.Entity;
+import org.semanticweb.owlapi.model.OWLOntology;

 /**
  * @author Lorenz Buehmann
  *
  */
 public class SimpleSemanticIndex implements SemanticIndex{

+    private SyntacticIndex syntacticIndex;
+    private RDFSLabelEntityTextRetriever labelRetriever;
+
+    /**
+     *
+     */
+    public SimpleSemanticIndex(OWLOntology ontology, SyntacticIndex syntacticIndex) {
+        this.syntacticIndex = syntacticIndex;
+        labelRetriever = new RDFSLabelEntityTextRetriever(ontology);
+    }
+
+
     /* (non-Javadoc)
      * @see org.dllearner.algorithms.isle.SemanticIndex#getDocuments(org.dllearner.core.owl.Entity)
      */
     @Override
     public Set<String> getDocuments(Entity entity) {
-        return null;
+        Set<String> documents = new HashSet<String>();
+        Map<String, Double> relevantText = labelRetriever.getRelevantText(entity);
+
+        for (Entry<String, Double> entry : relevantText.entrySet()) {
+            String label = entry.getKey();
+            documents.addAll(syntacticIndex.getDocuments(label));
+        }
+
+        return documents;
     }

     /* (non-Javadoc)
@@ -27,7 +51,7 @@
      */
     @Override
     public int count(Entity entity) {
-        return 0;
+        return getDocuments(entity).size();
     }

     /* (non-Javadoc)
@@ -35,7 +59,7 @@
      */
     @Override
     public int getSize() {
-        return 0;
+        return syntacticIndex.getSize();
     }

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java 2013-07-17 11:44:41 UTC (rev 4021)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java 2013-07-29 08:12:30 UTC (rev 4022)
@@ -8,6 +8,7 @@
 import java.util.SortedSet;
 import java.util.TreeSet;

+import org.apache.log4j.Logger;
 import org.dllearner.algorithms.qtl.datastructures.QueryTree;
 import org.dllearner.learningproblems.Heuristics;

@@ -16,6 +17,9 @@

 public class NoiseSensitiveLGG<N> {
+
+    private static final Logger logger = Logger.getLogger(NoiseSensitiveLGG.class.getName());
+
     private LGGGenerator<N> lggGenerator = new LGGGeneratorImpl<N>();

     private Queue<EvaluatedQueryTree<N>> todoList;
@@ -29,7 +33,8 @@
         Monitor lggMon = MonitorFactory.getTimeMonitor("lgg-mon");
         init(trees);
         EvaluatedQueryTree<N> currentElement;
-        do{System.out.println("TODO list size: " + todoList.size());
+        do{
+            logger.trace("TODO list size: " + todoList.size());
             //pick best element from todo list
             currentElement = todoList.poll();
             for (QueryTree<N> example : currentElement.getUncoveredExamples()) {
@@ -39,33 +44,36 @@
                 QueryTree<N> lgg = lggGenerator.getLGG(tree, example);
                 lggMon.stop();
                 //compute examples which are not covered by LGG
-                Collection<QueryTree<N>> uncoveredExamples = new ArrayList<QueryTree<N>>();
-                for (QueryTree<N> queryTree : trees) {
-                    subMon.start();
-                    boolean subsumed = queryTree.isSubsumedBy(lgg);
-                    subMon.stop();
-                    if(!subsumed){
-                        uncoveredExamples.add(queryTree);
-                    }
-                }
+                Collection<QueryTree<N>> uncoveredExamples = getUncoveredTrees(lgg, trees);
                 //compute score
                 double score = Heuristics.getConfidenceInterval95WaldAverage(trees.size(), trees.size() - uncoveredExamples.size());
                 //add to todo list, if not already contained in todo list or solution list
                 EvaluatedQueryTree<N> solution = new EvaluatedQueryTree<N>(lgg, uncoveredExamples, score);
                 todo(solution);
             }
-            System.out.println("LGG time: " + lggMon.getTotal() + "ms");
-            System.out.println("Avg. LGG time: " + lggMon.getAvg() + "ms");
-            System.out.println("#LGG computations: " + lggMon.getHits());
-            System.out.println("Subsumption test time: " + subMon.getTotal() + "ms");
-            System.out.println("Avg. subsumption test time: " + subMon.getAvg() + "ms");
-            System.out.println("#Subsumption tests: " + subMon.getHits());
             solutions.add(currentElement);
//            todoList.remove(currentElement);
         } while(!terminationCriteriaSatisfied());
+        logger.trace("LGG time: " + lggMon.getTotal() + "ms");
+        logger.trace("Avg. LGG time: " + lggMon.getAvg() + "ms");
+        logger.trace("#LGG computations: " + lggMon.getHits());
+        logger.trace("Subsumption test time: " + subMon.getTotal() + "ms");
+        logger.trace("Avg. subsumption test time: " + subMon.getAvg() + "ms");
+        logger.trace("#Subsumption tests: " + subMon.getHits());
         return new ArrayList<EvaluatedQueryTree<N>>(solutions);
     }

+    private Collection<QueryTree<N>> getUncoveredTrees(QueryTree<N> tree, List<QueryTree<N>> allTrees){
+        Collection<QueryTree<N>> uncoveredTrees = new ArrayList<QueryTree<N>>();
+        for (QueryTree<N> queryTree : allTrees) {
+            boolean subsumed = queryTree.isSubsumedBy(tree);
+            if(!subsumed){
+                uncoveredTrees.add(queryTree);
+            }
+        }
+        return uncoveredTrees;
+    }
+
     private void init(List<QueryTree<N>> trees){
         todoList = new PriorityQueue<EvaluatedQueryTree<N>>();
         solutions = new TreeSet<EvaluatedQueryTree<N>>();
@@ -76,9 +84,11 @@
         for (QueryTree<N> queryTree : trees) {//System.out.println(queryTree.getStringRepresentation());
             boolean distinct = true;
             for (QueryTree<N> otherTree : distinctTrees) {
-                if(queryTree.isSubsumedBy(otherTree)){
-                    distinct = false;
-                    break;
+                if(!queryTree.equals(otherTree)){
+                    if(queryTree.isSameTreeAs(otherTree)){
+                        distinct = false;
+                        break;
+                    }
                 }
             }
             if(distinct){
@@ -86,9 +96,8 @@
             }
         }
         for (QueryTree<N> queryTree : distinctTrees) {
-            Collection<QueryTree<N>> uncoveredExamples = new ArrayList<QueryTree<N>>(distinctTrees);
-            uncoveredExamples.remove(queryTree);
-            double score = (trees.size() - uncoveredExamples.size()) / (double)trees.size();
+            Collection<QueryTree<N>> uncoveredExamples = getUncoveredTrees(queryTree, trees);
+            double score = Heuristics.getConfidenceInterval95WaldAverage(trees.size(), trees.size() - uncoveredExamples.size());
             todoList.add(new EvaluatedQueryTree<N>(queryTree, uncoveredExamples, score));
         }
     }

Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java 2013-07-17 11:44:41 UTC (rev 4021)
+++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryEngineHTTP.java 2013-07-29 08:12:30 UTC (rev 4022)
@@ -268,7 +268,7 @@

     private Model execModel(Model model) {
         HttpQuery httpQuery = makeHttpQuery() ;
-        httpQuery.setAccept(WebContent.contentTypeNTriplesAlt) ;
+        httpQuery.setAccept(WebContent.contentTypeTurtleAlt1) ;
         InputStream in = httpQuery.exec() ;

         //Don't assume the endpoint actually gives back the content type we asked for
@@ -284,7 +284,7 @@
         //Try to select language appropriately here based on the model content type
         Lang lang = WebContent.contentTypeToLang(actualContentType);
         if (! RDFLanguages.isTriples(lang)) throw new QueryException("Endpoint returned Content Type: " + actualContentType + " which is not a valid RDF Graph syntax");
-        model.read(in, null, Lang.NTRIPLES.getName()) ;
+        model.read(in, null, Lang.TURTLE.getName()) ;

         return model ;
     }

Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-07-17 11:44:41 UTC (rev 4021)
+++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-07-29 08:12:30 UTC (rev 4022)
@@ -82,6 +82,8 @@
     private Set<? extends OWLEntity> variableEntities = new HashSet<OWLEntity>();

     private VariablesMapping mapping;
+    private boolean ignoreGenericTypeStatements = true;
+    private OWLClassExpression expr;

     public OWLClassExpressionToSPARQLConverter(VariablesMapping mapping) {
         this.mapping = mapping;
@@ -96,6 +98,7 @@
     }

     public String convert(String rootVariable, OWLClassExpression expr){
+        this.expr = expr;
         reset();
         variables.push(rootVariable);
         expr.accept(this);
@@ -261,7 +264,9 @@

     @Override
     public void visit(OWLClass ce) {
-        sparql += triple(variables.peek(), "a", render(ce));
+        if(ce.equals(expr) || (ignoreGenericTypeStatements && !ce.isOWLThing())){
+            sparql += triple(variables.peek(), "a", render(ce));
+        }
     }

     @Override
@@ -577,7 +582,9 @@

     @Override
     public void visit(OWLDatatype node) {
-        sparql += "FILTER(DATATYPE(" + variables.peek() + "=<" + node.getIRI().toString() + ">))";
+        if(ignoreGenericTypeStatements && !node.isRDFPlainLiteral() && !node.isTopDatatype()){
+            sparql += "FILTER(DATATYPE(" + variables.peek() + "=<" + node.getIRI().toString() + ">))";
+        }
     }

     @Override

Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java
===================================================================
--- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-07-17 11:44:41 UTC (rev 4021)
+++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-07-29 08:12:30 UTC (rev 4022)
@@ -6,6 +6,15 @@
 import java.io.File;
 import java.util.Map;

+import org.dllearner.algorithms.isle.index.OWLOntologyLuceneSyntacticIndexCreator;
+import org.dllearner.algorithms.isle.index.SemanticIndex;
+import org.dllearner.algorithms.isle.index.SimpleSemanticIndex;
+import org.dllearner.algorithms.isle.index.SyntacticIndex;
+import org.dllearner.algorithms.isle.metrics.PMIRelevanceMetric;
+import org.dllearner.algorithms.isle.metrics.RelevanceMetric;
+import org.dllearner.algorithms.isle.metrics.RelevanceUtils;
+import org.dllearner.algorithms.isle.textretrieval.EntityTextRetriever;
+import org.dllearner.algorithms.isle.textretrieval.RDFSLabelEntityTextRetriever;
 import org.dllearner.core.AbstractReasonerComponent;
 import org.dllearner.core.KnowledgeSource;
 import org.dllearner.core.owl.Entity;
@@ -16,9 +25,12 @@
 import org.junit.Before;
 import org.junit.Test;
 import org.semanticweb.owlapi.apibinding.OWLManager;
+import org.semanticweb.owlapi.model.OWLDataFactory;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyManager;

+import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl;
+
 import com.google.common.base.Joiner;

 /**
@@ -29,10 +41,10 @@

     private OWLOntologyManager manager;
     private OWLOntology ontology;
+    private OWLDataFactory df = new OWLDataFactoryImpl();
     private NamedClass cls;
     private EntityTextRetriever textRetriever;
-    private LuceneSearcher searcher;
-    private Relevance relevance;
+    private RelevanceMetric relevance;
     private String searchField = "label";

     /**
@@ -43,9 +55,9 @@
         ontology = manager.loadOntologyFromOntologyDocument(new File("../examples/isle/father_labeled.owl"));
         cls = new NamedClass("http://example.com/father#father");
         textRetriever = new RDFSLabelEntityTextRetriever(ontology);
-        OWLOntologyLuceneIndex index = new OWLOntologyLuceneIndex(ontology, searchField);
-        searcher = new LuceneSearcher(index.getDirectory(), searchField);
-        relevance = new PMILuceneBasedRelevance(ontology, searcher, textRetriever);
+        SyntacticIndex syntacticIndex = new OWLOntologyLuceneSyntacticIndexCreator(ontology, df.getRDFSLabel(), searchField).buildIndex();
+        SemanticIndex semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex);
+        relevance = new PMIRelevanceMetric(semanticIndex);
     }

     /**
@@ -66,7 +78,7 @@
     @Test
     public void testEntityRelevance() throws Exception {
         System.out.println("Relevant entities for entity " + cls + ":");
-        Map<Entity, Double> entityRelevance = relevance.getEntityRelevance(cls);
+        Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance);
         System.out.println(Joiner.on("\n").join(entityRelevance.entrySet()));
     }

@@ -80,7 +92,7 @@
         lp.setClassToDescribe(cls);
         lp.init();

-        Map<Entity, Double> entityRelevance = relevance.getEntityRelevance(cls);
+        Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance);
         NLPHeuristic heuristic = new NLPHeuristic(entityRelevance);
         ISLE isle = new ISLE(lp, reasoner);

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
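Tying the pieces of this revision together: SimpleSemanticIndex maps an entity to its rdfs:label texts and unions the documents the Lucene-backed SyntacticIndex returns for each label, so count(entity) is simply the size of that document set. A PMI-style relevance in the spirit of the earlier PMIRelevances code can then be computed from document sets alone. The sketch below is illustrative only; the interface and class names are invented, and it is not the actual PMIRelevanceMetric implementation, whose source is not part of this change set.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Illustrative sketch: PMI over document sets as delivered by a SemanticIndex-style component.
public class SemanticPmiSketch {

    interface DocumentIndex {
        Set<String> getDocuments(String entity); // documents mentioning the entity via any of its labels
        int getSize();                           // total number of indexed documents
    }

    // PMI(A,B) = log( P(A,B) / (P(A) * P(B)) ), probabilities estimated from document frequencies.
    static double pmi(DocumentIndex index, String a, String b) {
        Set<String> docsA = index.getDocuments(a);
        Set<String> docsB = index.getDocuments(b);
        Set<String> both = new HashSet<String>(docsA);
        both.retainAll(docsB);                   // co-occurrence = intersection of the two document sets
        double n = index.getSize();
        double pA = docsA.size() / n;
        double pB = docsB.size() / n;
        double pAB = both.size() / n;
        return Math.log(pAB / (pA * pB));        // NaN or -Infinity if any count is zero; callers should guard for that
    }

    public static void main(String[] args) {
        // Tiny in-memory index: 10 documents, "father" occurs in {d1,d2,d3}, "male" in {d2,d3,d4}.
        final Map<String, Set<String>> docs = new HashMap<String, Set<String>>();
        docs.put("father", new HashSet<String>(Arrays.asList("d1", "d2", "d3")));
        docs.put("male", new HashSet<String>(Arrays.asList("d2", "d3", "d4")));
        DocumentIndex index = new DocumentIndex() {
            public Set<String> getDocuments(String entity) { return docs.get(entity); }
            public int getSize() { return 10; }
        };
        // log( (2/10) / ((3/10) * (3/10)) ) = log(2.22...), roughly 0.8.
        System.out.println(pmi(index, "father", "male"));
    }
}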
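One further note on the OWLClassExpressionToSPARQLConverter change above: with ignoreGenericTypeStatements set, an owl:Thing filler no longer contributes a generic rdf:type pattern, and rdf:PlainLiteral or the top datatype no longer contributes a DATATYPE filter, so only informative patterns end up in the generated query. The sketch below illustrates the intended effect; it assumes the converter also offers a no-argument constructor (only the VariablesMapping constructor is visible in the diff), and the exact variable names in the generated pattern may differ.

import org.dllearner.utilities.owl.OWLClassExpressionToSPARQLConverter;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLDataFactory;

// Illustration of the ignoreGenericTypeStatements behaviour; the no-argument constructor is an assumption.
public class ConverterSketch {

    public static void main(String[] args) {
        OWLDataFactory df = OWLManager.getOWLDataFactory();
        String ns = "http://example.com/father#";

        // father and (hasChild some Thing)
        OWLClassExpression ce = df.getOWLObjectIntersectionOf(
                df.getOWLClass(IRI.create(ns + "father")),
                df.getOWLObjectSomeValuesFrom(df.getOWLObjectProperty(IRI.create(ns + "hasChild")), df.getOWLThing()));

        OWLClassExpressionToSPARQLConverter converter = new OWLClassExpressionToSPARQLConverter();
        System.out.println(converter.convert("?x", ce));
        // With ignoreGenericTypeStatements = true the owl:Thing filler contributes no generic
        // "?y a owl:Thing" pattern, so the result is essentially
        //   ?x a <http://example.com/father#father> . ?x <http://example.com/father#hasChild> ?y .
        // rather than the same two patterns plus a type triple for owl:Thing.
    }
}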