You can subscribe to this list here.
2007 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
|
Aug
(120) |
Sep
(36) |
Oct
(116) |
Nov
(17) |
Dec
(44) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
2008 |
Jan
(143) |
Feb
(192) |
Mar
(74) |
Apr
(84) |
May
(105) |
Jun
(64) |
Jul
(49) |
Aug
(120) |
Sep
(159) |
Oct
(156) |
Nov
(51) |
Dec
(28) |
2009 |
Jan
(17) |
Feb
(55) |
Mar
(33) |
Apr
(57) |
May
(54) |
Jun
(28) |
Jul
(6) |
Aug
(16) |
Sep
(38) |
Oct
(30) |
Nov
(26) |
Dec
(52) |
2010 |
Jan
(7) |
Feb
(91) |
Mar
(65) |
Apr
(2) |
May
(14) |
Jun
(25) |
Jul
(38) |
Aug
(48) |
Sep
(80) |
Oct
(70) |
Nov
(75) |
Dec
(77) |
2011 |
Jan
(68) |
Feb
(53) |
Mar
(51) |
Apr
(35) |
May
(65) |
Jun
(101) |
Jul
(29) |
Aug
(230) |
Sep
(95) |
Oct
(49) |
Nov
(110) |
Dec
(63) |
2012 |
Jan
(41) |
Feb
(42) |
Mar
(25) |
Apr
(46) |
May
(51) |
Jun
(44) |
Jul
(45) |
Aug
(29) |
Sep
(12) |
Oct
(9) |
Nov
(17) |
Dec
(2) |
2013 |
Jan
(12) |
Feb
(14) |
Mar
(7) |
Apr
(16) |
May
(54) |
Jun
(27) |
Jul
(11) |
Aug
(5) |
Sep
(85) |
Oct
(27) |
Nov
(37) |
Dec
(32) |
2014 |
Jan
(8) |
Feb
(29) |
Mar
(5) |
Apr
(3) |
May
(22) |
Jun
(3) |
Jul
(4) |
Aug
(3) |
Sep
|
Oct
|
Nov
|
Dec
|
From: <tc...@us...> - 2012-05-14 03:55:49
|
Revision: 3707 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3707&view=rev Author: tcanvn Date: 2012-05-14 03:55:41 +0000 (Mon, 14 May 2012) Log Message: ----------- Add learning configuration files for PADCEL algorithm for the following datasets: carcinogenesis, family benchmark (7 datasets), forte uncle, poker-straight, moral-simple, showering duration (new). Modified Paths: -------------- trunk/examples/forte/uncle_owl_large.conf Added Paths: ----------- trunk/examples/carcinogenesis/carcinogenesis_padcel.conf trunk/examples/family-benchmark/Aunt_padcel.conf trunk/examples/family-benchmark/Brother_padcel.conf trunk/examples/family-benchmark/Cousin_padcel.conf trunk/examples/family-benchmark/Daughter_padcel.conf trunk/examples/family-benchmark/Father_padcel.conf trunk/examples/family-benchmark/Grandson_padcel.conf trunk/examples/family-benchmark/Uncle_padcel.conf trunk/examples/forte/uncle_owl_large_padcel.conf trunk/examples/moral_reasoner/moral_all_examples_simple_owl_padcel.conf trunk/examples/poker/straight_owl_padcel.conf trunk/examples/showering-duration/ trunk/examples/showering-duration/abd.muse.massey.ac.nz.owl trunk/examples/showering-duration/uca1_150.owl trunk/examples/showering-duration/uca1_150_celoe.conf trunk/examples/showering-duration/uca1_150_padcel.conf trunk/examples/showering-duration/uca2_300.owl trunk/examples/showering-duration/uca2_300_celoe.conf trunk/examples/showering-duration/uca2_300_padcel.conf Added: trunk/examples/carcinogenesis/carcinogenesis_padcel.conf =================================================================== --- trunk/examples/carcinogenesis/carcinogenesis_padcel.conf (rev 0) +++ trunk/examples/carcinogenesis/carcinogenesis_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,381 @@ +/* +- CarcinoGennesis example +- Positive examples: 182, negative examples: 155 +- PDCCEL algorithm +- PDCCEL splitter + + @author An C. 
Tran +*/ + +prefixes = [ ("kb","http://dl-learner.org/carcinogenesis#") ] + + +/* run 10-fold cross validation */ +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + + +/* PDCCEL algorithm */ +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "15" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +/* use PDCCEL splitter for splitting datatype properties */ +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + + +/* knowledge source definition */ +ks.type = "OWL File" +ks.fileName = "carcinogenesis.owl" + +/* reasoner component */ +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +/* learning problem */ +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"kb:d1", +"kb:d10", +"kb:d101", +"kb:d102", +"kb:d103", +"kb:d106", +"kb:d107", +"kb:d108", +"kb:d11", +"kb:d12", +"kb:d13", +"kb:d134", +"kb:d135", +"kb:d136", +"kb:d138", +"kb:d140", +"kb:d141", +"kb:d144", +"kb:d145", +"kb:d146", +"kb:d147", +"kb:d15", +"kb:d17", +"kb:d19", +"kb:d192", +"kb:d193", +"kb:d195", +"kb:d196", +"kb:d197", +"kb:d198", +"kb:d199", +"kb:d2", +"kb:d20", +"kb:d200", +"kb:d201", +"kb:d202", +"kb:d203", +"kb:d204", +"kb:d205", +"kb:d21", +"kb:d22", +"kb:d226", +"kb:d227", +"kb:d228", +"kb:d229", +"kb:d231", +"kb:d232", +"kb:d234", +"kb:d236", +"kb:d239", +"kb:d23_2", +"kb:d242", +"kb:d245", +"kb:d247", +"kb:d249", +"kb:d25", +"kb:d252", +"kb:d253", +"kb:d254", +"kb:d255", +"kb:d26", +"kb:d272", +"kb:d275", +"kb:d277", +"kb:d279", +"kb:d28", +"kb:d281", +"kb:d283", +"kb:d284", +"kb:d288", +"kb:d29", +"kb:d290", +"kb:d291", +"kb:d292", +"kb:d30", +"kb:d31", +"kb:d32", +"kb:d33", +"kb:d34", +"kb:d35", +"kb:d36", +"kb:d37", +"kb:d38", +"kb:d42", +"kb:d43", +"kb:d44", +"kb:d45", +"kb:d46", +"kb:d47", +"kb:d48", +"kb:d49", +"kb:d5", +"kb:d51", +"kb:d52", +"kb:d53", 
+"kb:d55", +"kb:d58", +"kb:d6", +"kb:d7", +"kb:d84", +"kb:d85_2", +"kb:d86", +"kb:d87", +"kb:d88", +"kb:d89", +"kb:d9", +"kb:d91", +"kb:d92", +"kb:d93", +"kb:d95", +"kb:d96", +"kb:d98", +"kb:d99", +"kb:d100", +"kb:d104", +"kb:d105", +"kb:d109", +"kb:d137", +"kb:d139", +"kb:d14", +"kb:d142", +"kb:d143", +"kb:d148", +"kb:d16", +"kb:d18", +"kb:d191", +"kb:d206", +"kb:d230", +"kb:d233", +"kb:d235", +"kb:d237", +"kb:d238", +"kb:d23_1", +"kb:d24", +"kb:d240", +"kb:d241", +"kb:d243", +"kb:d244", +"kb:d246", +"kb:d248", +"kb:d250", +"kb:d251", +"kb:d27", +"kb:d273", +"kb:d274", +"kb:d278", +"kb:d286", +"kb:d289", +"kb:d3", +"kb:d39", +"kb:d4", +"kb:d40", +"kb:d41", +"kb:d50", +"kb:d54", +"kb:d56", +"kb:d57", +"kb:d8", +"kb:d85_1", +"kb:d90", +"kb:d94", +"kb:d97", +"kb:d296", +"kb:d305", +"kb:d306", +"kb:d307", +"kb:d308", +"kb:d311", +"kb:d314", +"kb:d315", +"kb:d316", +"kb:d320", +"kb:d322", +"kb:d323", +"kb:d325", +"kb:d329", +"kb:d330", +"kb:d331", +"kb:d332", +"kb:d333", +"kb:d336", +"kb:d337" +} +lp.negativeExamples = { +"kb:d110", +"kb:d111", +"kb:d114", +"kb:d116", +"kb:d117", +"kb:d119", +"kb:d121", +"kb:d123", +"kb:d124", +"kb:d125", +"kb:d127", +"kb:d128", +"kb:d130", +"kb:d133", +"kb:d150", +"kb:d151", +"kb:d154", +"kb:d155", +"kb:d156", +"kb:d159", +"kb:d160", +"kb:d161", +"kb:d162", +"kb:d163", +"kb:d164", +"kb:d165", +"kb:d166", +"kb:d169", +"kb:d170", +"kb:d171", +"kb:d172", +"kb:d173", +"kb:d174", +"kb:d178", +"kb:d179", +"kb:d180", +"kb:d181", +"kb:d183", +"kb:d184", +"kb:d185", +"kb:d186", +"kb:d188", +"kb:d190", +"kb:d194", +"kb:d207", +"kb:d208_1", +"kb:d209", +"kb:d210", +"kb:d211", +"kb:d212", +"kb:d213", +"kb:d214", +"kb:d215", +"kb:d217", +"kb:d218", +"kb:d219", +"kb:d220", +"kb:d224", +"kb:d256", +"kb:d257", +"kb:d258", +"kb:d261", +"kb:d262", +"kb:d263", +"kb:d264", +"kb:d265", +"kb:d266", +"kb:d267", +"kb:d269", +"kb:d271", +"kb:d276", +"kb:d280", +"kb:d285", +"kb:d287", +"kb:d293", +"kb:d294", +"kb:d59", +"kb:d60", +"kb:d61", +"kb:d63", 
+"kb:d64", +"kb:d65", +"kb:d69", +"kb:d70", +"kb:d71", +"kb:d72", +"kb:d73", +"kb:d74", +"kb:d75", +"kb:d76", +"kb:d77", +"kb:d78", +"kb:d79", +"kb:d80", +"kb:d81", +"kb:d82", +"kb:d112", +"kb:d113", +"kb:d115", +"kb:d118", +"kb:d120", +"kb:d122", +"kb:d126", +"kb:d129", +"kb:d131", +"kb:d132", +"kb:d149", +"kb:d152", +"kb:d153", +"kb:d157", +"kb:d158", +"kb:d167", +"kb:d168", +"kb:d175", +"kb:d176", +"kb:d177", +"kb:d182", +"kb:d187", +"kb:d189", +"kb:d208_2", +"kb:d216", +"kb:d221", +"kb:d222", +"kb:d223", +"kb:d225", +"kb:d259", +"kb:d260", +"kb:d268", +"kb:d270", +"kb:d282", +"kb:d295", +"kb:d62", +"kb:d66", +"kb:d67", +"kb:d68", +"kb:d83", +"kb:d297", +"kb:d298", +"kb:d299", +"kb:d300", +"kb:d302", +"kb:d303", +"kb:d304", +"kb:d309", +"kb:d312", +"kb:d313", +"kb:d317", +"kb:d318", +"kb:d319", +"kb:d324", +"kb:d326", +"kb:d327", +"kb:d328", +"kb:d334", +"kb:d335" +} Added: trunk/examples/family-benchmark/Aunt_padcel.conf =================================================================== --- trunk/examples/family-benchmark/Aunt_padcel.conf (rev 0) +++ trunk/examples/family-benchmark/Aunt_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,110 @@ + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "120" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"http://www.benchmark.org/family#F2F14" +,"http://www.benchmark.org/family#F2F12" +,"http://www.benchmark.org/family#F2F19" +,"http://www.benchmark.org/family#F2F26" 
+,"http://www.benchmark.org/family#F2F28" +,"http://www.benchmark.org/family#F2F36" +,"http://www.benchmark.org/family#F3F52" +,"http://www.benchmark.org/family#F3F53" +,"http://www.benchmark.org/family#F5F62" +,"http://www.benchmark.org/family#F6F72" +,"http://www.benchmark.org/family#F6F79" +,"http://www.benchmark.org/family#F6F77" +,"http://www.benchmark.org/family#F6F86" +,"http://www.benchmark.org/family#F6F91" +,"http://www.benchmark.org/family#F6F84" +,"http://www.benchmark.org/family#F6F96" +,"http://www.benchmark.org/family#F6F101" +,"http://www.benchmark.org/family#F6F93" +,"http://www.benchmark.org/family#F7F114" +,"http://www.benchmark.org/family#F7F106" +,"http://www.benchmark.org/family#F7F116" +,"http://www.benchmark.org/family#F7F119" +,"http://www.benchmark.org/family#F7F126" +,"http://www.benchmark.org/family#F7F121" +,"http://www.benchmark.org/family#F9F148" +,"http://www.benchmark.org/family#F9F150" +,"http://www.benchmark.org/family#F9F143" +,"http://www.benchmark.org/family#F9F152" +,"http://www.benchmark.org/family#F9F154" +,"http://www.benchmark.org/family#F9F141" +,"http://www.benchmark.org/family#F9F160" +,"http://www.benchmark.org/family#F9F163" +,"http://www.benchmark.org/family#F9F158" +,"http://www.benchmark.org/family#F9F168" +,"http://www.benchmark.org/family#F10F174" +,"http://www.benchmark.org/family#F10F179" +,"http://www.benchmark.org/family#F10F181" +,"http://www.benchmark.org/family#F10F192" +,"http://www.benchmark.org/family#F10F193" +,"http://www.benchmark.org/family#F10F186" +,"http://www.benchmark.org/family#F10F195" +} + +lp.negativeExamples = { +"http://www.benchmark.org/family#F6M99" +,"http://www.benchmark.org/family#F10F200" +,"http://www.benchmark.org/family#F9F156" +,"http://www.benchmark.org/family#F6M69" +,"http://www.benchmark.org/family#F2F15" +,"http://www.benchmark.org/family#F6M100" +,"http://www.benchmark.org/family#F8F133" +,"http://www.benchmark.org/family#F3F48" +,"http://www.benchmark.org/family#F2F30" 
+,"http://www.benchmark.org/family#F4F55" +,"http://www.benchmark.org/family#F6F74" +,"http://www.benchmark.org/family#F10M199" +,"http://www.benchmark.org/family#F7M104" +,"http://www.benchmark.org/family#F9M146" +,"http://www.benchmark.org/family#F6M71" +,"http://www.benchmark.org/family#F2F22" +,"http://www.benchmark.org/family#F2M13" +,"http://www.benchmark.org/family#F9F169" +,"http://www.benchmark.org/family#F5F65" +,"http://www.benchmark.org/family#F6M81" +,"http://www.benchmark.org/family#F7M131" +,"http://www.benchmark.org/family#F7F129" +,"http://www.benchmark.org/family#F7M107" +,"http://www.benchmark.org/family#F10F189" +,"http://www.benchmark.org/family#F8F135" +,"http://www.benchmark.org/family#F8M136" +,"http://www.benchmark.org/family#F10M188" +,"http://www.benchmark.org/family#F9F164" +,"http://www.benchmark.org/family#F7F118" +,"http://www.benchmark.org/family#F2F10" +,"http://www.benchmark.org/family#F6F97" +,"http://www.benchmark.org/family#F7F111" +,"http://www.benchmark.org/family#F9M151" +,"http://www.benchmark.org/family#F4M59" +,"http://www.benchmark.org/family#F2M37" +,"http://www.benchmark.org/family#F1M1" +,"http://www.benchmark.org/family#F9M142" +,"http://www.benchmark.org/family#F4M57" +,"http://www.benchmark.org/family#F9M170" +,"http://www.benchmark.org/family#F5M66" +,"http://www.benchmark.org/family#F9F145" +} \ No newline at end of file Added: trunk/examples/family-benchmark/Brother_padcel.conf =================================================================== --- trunk/examples/family-benchmark/Brother_padcel.conf (rev 0) +++ trunk/examples/family-benchmark/Brother_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,87 @@ +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" 
+alg.maxExecutionTimeInSeconds = "120" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"http://www.benchmark.org/family#F2M13" +,"http://www.benchmark.org/family#F2M18" +,"http://www.benchmark.org/family#F2M11" +,"http://www.benchmark.org/family#F2M32" +,"http://www.benchmark.org/family#F3M44" +,"http://www.benchmark.org/family#F3M45" +,"http://www.benchmark.org/family#F5M64" +,"http://www.benchmark.org/family#F6M71" +,"http://www.benchmark.org/family#F6M81" +,"http://www.benchmark.org/family#F6M90" +,"http://www.benchmark.org/family#F6M100" +,"http://www.benchmark.org/family#F6M92" +,"http://www.benchmark.org/family#F7M113" +,"http://www.benchmark.org/family#F7M117" +,"http://www.benchmark.org/family#F7M115" +,"http://www.benchmark.org/family#F7M125" +,"http://www.benchmark.org/family#F7M123" +,"http://www.benchmark.org/family#F7M131" +,"http://www.benchmark.org/family#F9M151" +,"http://www.benchmark.org/family#F9M153" +,"http://www.benchmark.org/family#F9M159" +,"http://www.benchmark.org/family#F9M166" +,"http://www.benchmark.org/family#F9M162" +,"http://www.benchmark.org/family#F9M157" +,"http://www.benchmark.org/family#F9M167" +,"http://www.benchmark.org/family#F10M173" +,"http://www.benchmark.org/family#F10M183" +,"http://www.benchmark.org/family#F10M184" +,"http://www.benchmark.org/family#F10M188" +,"http://www.benchmark.org/family#F10M199" +} + +lp.negativeExamples = { +"http://www.benchmark.org/family#F10M196" +,"http://www.benchmark.org/family#F1M8" +,"http://www.benchmark.org/family#F7F103" +,"http://www.benchmark.org/family#F3F41" +,"http://www.benchmark.org/family#F1M1" +,"http://www.benchmark.org/family#F9F164" +,"http://www.benchmark.org/family#F9M149" 
+,"http://www.benchmark.org/family#F9M147" +,"http://www.benchmark.org/family#F9F158" +,"http://www.benchmark.org/family#F2F12" +,"http://www.benchmark.org/family#F1F5" +,"http://www.benchmark.org/family#F6M88" +,"http://www.benchmark.org/family#F7M104" +,"http://www.benchmark.org/family#F7M109" +,"http://www.benchmark.org/family#F7M120" +,"http://www.benchmark.org/family#F6F83" +,"http://www.benchmark.org/family#F6M78" +,"http://www.benchmark.org/family#F3M47" +,"http://www.benchmark.org/family#F10F174" +,"http://www.benchmark.org/family#F6F76" +,"http://www.benchmark.org/family#F2F26" +,"http://www.benchmark.org/family#F6F89" +,"http://www.benchmark.org/family#F3M50" +,"http://www.benchmark.org/family#F3F42" +,"http://www.benchmark.org/family#F6F79" +,"http://www.benchmark.org/family#F10M194" +,"http://www.benchmark.org/family#F2F19" +,"http://www.benchmark.org/family#F2F24" +,"http://www.benchmark.org/family#F9F154" +,"http://www.benchmark.org/family#F4F58" +} Added: trunk/examples/family-benchmark/Cousin_padcel.conf =================================================================== --- trunk/examples/family-benchmark/Cousin_padcel.conf (rev 0) +++ trunk/examples/family-benchmark/Cousin_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,171 @@ + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "120" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"http://www.benchmark.org/family#F2M13" 
+,"http://www.benchmark.org/family#F2F14" +,"http://www.benchmark.org/family#F2F15" +,"http://www.benchmark.org/family#F2M16" +,"http://www.benchmark.org/family#F2M21" +,"http://www.benchmark.org/family#F2F22" +,"http://www.benchmark.org/family#F2F30" +,"http://www.benchmark.org/family#F2M31" +,"http://www.benchmark.org/family#F2F38" +,"http://www.benchmark.org/family#F2M39" +,"http://www.benchmark.org/family#F6M73" +,"http://www.benchmark.org/family#F6F74" +,"http://www.benchmark.org/family#F6F79" +,"http://www.benchmark.org/family#F6M80" +,"http://www.benchmark.org/family#F6M81" +,"http://www.benchmark.org/family#F6F82" +,"http://www.benchmark.org/family#F6F86" +,"http://www.benchmark.org/family#F6F87" +,"http://www.benchmark.org/family#F6M88" +,"http://www.benchmark.org/family#F6M90" +,"http://www.benchmark.org/family#F6F91" +,"http://www.benchmark.org/family#F6F94" +,"http://www.benchmark.org/family#F6M95" +,"http://www.benchmark.org/family#F6M100" +,"http://www.benchmark.org/family#F6F101" +,"http://www.benchmark.org/family#F7F108" +,"http://www.benchmark.org/family#F7M109" +,"http://www.benchmark.org/family#F7M113" +,"http://www.benchmark.org/family#F7F114" +,"http://www.benchmark.org/family#F7M117" +,"http://www.benchmark.org/family#F7F118" +,"http://www.benchmark.org/family#F7M123" +,"http://www.benchmark.org/family#F7F124" +,"http://www.benchmark.org/family#F7M131" +,"http://www.benchmark.org/family#F9F145" +,"http://www.benchmark.org/family#F9M146" +,"http://www.benchmark.org/family#F9F148" +,"http://www.benchmark.org/family#F9M149" +,"http://www.benchmark.org/family#F9F150" +,"http://www.benchmark.org/family#F9F143" +,"http://www.benchmark.org/family#F9M144" +,"http://www.benchmark.org/family#F9M151" +,"http://www.benchmark.org/family#F9F152" +,"http://www.benchmark.org/family#F9M155" +,"http://www.benchmark.org/family#F9F156" +,"http://www.benchmark.org/family#F9M153" +,"http://www.benchmark.org/family#F9F154" +,"http://www.benchmark.org/family#F9M161" 
+,"http://www.benchmark.org/family#F9M159" +,"http://www.benchmark.org/family#F9F160" +,"http://www.benchmark.org/family#F9F164" +,"http://www.benchmark.org/family#F9M165" +,"http://www.benchmark.org/family#F9M166" +,"http://www.benchmark.org/family#F9M162" +,"http://www.benchmark.org/family#F9F163" +,"http://www.benchmark.org/family#F9F169" +,"http://www.benchmark.org/family#F9M170" +,"http://www.benchmark.org/family#F10F175" +,"http://www.benchmark.org/family#F10M176" +,"http://www.benchmark.org/family#F10F177" +,"http://www.benchmark.org/family#F10M178" +,"http://www.benchmark.org/family#F10M183" +,"http://www.benchmark.org/family#F10M184" +,"http://www.benchmark.org/family#F10F185" +,"http://www.benchmark.org/family#F10M188" +,"http://www.benchmark.org/family#F10F189" +,"http://www.benchmark.org/family#F10F192" +,"http://www.benchmark.org/family#F10F193" +,"http://www.benchmark.org/family#F10M194" +,"http://www.benchmark.org/family#F10M197" +,"http://www.benchmark.org/family#F10F198" +} + + +lp.negativeExamples = { +"http://www.benchmark.org/family#F7M128" +,"http://www.benchmark.org/family#F7F111" +,"http://www.benchmark.org/family#F8M132" +,"http://www.benchmark.org/family#F6F97" +,"http://www.benchmark.org/family#F9F140" +,"http://www.benchmark.org/family#F2M23" +,"http://www.benchmark.org/family#F6M71" +,"http://www.benchmark.org/family#F7M130" +,"http://www.benchmark.org/family#F4F58" +,"http://www.benchmark.org/family#F6M75" +,"http://www.benchmark.org/family#F2F12" +,"http://www.benchmark.org/family#F3F48" +,"http://www.benchmark.org/family#F5M63" +,"http://www.benchmark.org/family#F3M40" +,"http://www.benchmark.org/family#F7M107" +,"http://www.benchmark.org/family#F3F46" +,"http://www.benchmark.org/family#F10F179" +,"http://www.benchmark.org/family#F7F105" +,"http://www.benchmark.org/family#F10M187" +,"http://www.benchmark.org/family#F3M50" +,"http://www.benchmark.org/family#F3M51" +,"http://www.benchmark.org/family#F7F119" 
+,"http://www.benchmark.org/family#F2F33" +,"http://www.benchmark.org/family#F9M142" +,"http://www.benchmark.org/family#F10M173" +,"http://www.benchmark.org/family#F4M57" +,"http://www.benchmark.org/family#F9F168" +,"http://www.benchmark.org/family#F5M68" +,"http://www.benchmark.org/family#F8M134" +,"http://www.benchmark.org/family#F2M9" +,"http://www.benchmark.org/family#F5F67" +,"http://www.benchmark.org/family#F2M29" +,"http://www.benchmark.org/family#F8F133" +,"http://www.benchmark.org/family#F9M147" +,"http://www.benchmark.org/family#F5M60" +,"http://www.benchmark.org/family#F2M27" +,"http://www.benchmark.org/family#F7M115" +,"http://www.benchmark.org/family#F2M25" +,"http://www.benchmark.org/family#F1M6" +,"http://www.benchmark.org/family#F2M11" +,"http://www.benchmark.org/family#F7M120" +,"http://www.benchmark.org/family#F4F56" +,"http://www.benchmark.org/family#F2M34" +,"http://www.benchmark.org/family#F2F26" +,"http://www.benchmark.org/family#F6F93" +,"http://www.benchmark.org/family#F1F2" +,"http://www.benchmark.org/family#F2F24" +,"http://www.benchmark.org/family#F3F53" +,"http://www.benchmark.org/family#F7F129" +,"http://www.benchmark.org/family#F8M136" +,"http://www.benchmark.org/family#F3F52" +,"http://www.benchmark.org/family#F4M54" +,"http://www.benchmark.org/family#F3M45" +,"http://www.benchmark.org/family#F3F42" +,"http://www.benchmark.org/family#F8M138" +,"http://www.benchmark.org/family#F10F195" +,"http://www.benchmark.org/family#F1F7" +,"http://www.benchmark.org/family#F10M190" +,"http://www.benchmark.org/family#F3F41" +,"http://www.benchmark.org/family#F2M35" +,"http://www.benchmark.org/family#F2F10" +,"http://www.benchmark.org/family#F6F96" +,"http://www.benchmark.org/family#F3F49" +,"http://www.benchmark.org/family#F6M85" +,"http://www.benchmark.org/family#F3M47" +,"http://www.benchmark.org/family#F5F65" +,"http://www.benchmark.org/family#F8F137" +,"http://www.benchmark.org/family#F10M196" +,"http://www.benchmark.org/family#F10M180" 
+,"http://www.benchmark.org/family#F7F116" +,"http://www.benchmark.org/family#F6F70" +} \ No newline at end of file Added: trunk/examples/family-benchmark/Daughter_padcel.conf =================================================================== --- trunk/examples/family-benchmark/Daughter_padcel.conf (rev 0) +++ trunk/examples/family-benchmark/Daughter_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,133 @@ + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "120" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"http://www.benchmark.org/family#F1F5" +,"http://www.benchmark.org/family#F1F7" +,"http://www.benchmark.org/family#F1F3" +,"http://www.benchmark.org/family#F2F17" +,"http://www.benchmark.org/family#F2F15" +,"http://www.benchmark.org/family#F2F19" +,"http://www.benchmark.org/family#F2F26" +,"http://www.benchmark.org/family#F2F33" +,"http://www.benchmark.org/family#F2F30" +,"http://www.benchmark.org/family#F2F28" +,"http://www.benchmark.org/family#F2F38" +,"http://www.benchmark.org/family#F2F36" +,"http://www.benchmark.org/family#F3F49" +,"http://www.benchmark.org/family#F3F52" +,"http://www.benchmark.org/family#F3F53" +,"http://www.benchmark.org/family#F3F42" +,"http://www.benchmark.org/family#F4F58" +,"http://www.benchmark.org/family#F4F56" +,"http://www.benchmark.org/family#F5F62" +,"http://www.benchmark.org/family#F6F79" +,"http://www.benchmark.org/family#F6F83" +,"http://www.benchmark.org/family#F6F77" 
+,"http://www.benchmark.org/family#F6F86" +,"http://www.benchmark.org/family#F6F89" +,"http://www.benchmark.org/family#F6F87" +,"http://www.benchmark.org/family#F6F84" +,"http://www.benchmark.org/family#F6F96" +,"http://www.benchmark.org/family#F6F97" +,"http://www.benchmark.org/family#F6F94" +,"http://www.benchmark.org/family#F7F108" +,"http://www.benchmark.org/family#F7F106" +,"http://www.benchmark.org/family#F7F118" +,"http://www.benchmark.org/family#F7F119" +,"http://www.benchmark.org/family#F7F129" +,"http://www.benchmark.org/family#F7F127" +,"http://www.benchmark.org/family#F7F121" +,"http://www.benchmark.org/family#F9F145" +,"http://www.benchmark.org/family#F9F148" +,"http://www.benchmark.org/family#F9F150" +,"http://www.benchmark.org/family#F9F143" +,"http://www.benchmark.org/family#F9F141" +,"http://www.benchmark.org/family#F9F164" +,"http://www.benchmark.org/family#F9F169" +,"http://www.benchmark.org/family#F10F175" +,"http://www.benchmark.org/family#F10F177" +,"http://www.benchmark.org/family#F10F179" +,"http://www.benchmark.org/family#F10F181" +,"http://www.benchmark.org/family#F10F192" +,"http://www.benchmark.org/family#F10F193" +,"http://www.benchmark.org/family#F10F186" +,"http://www.benchmark.org/family#F10F201" +,"http://www.benchmark.org/family#F10F195" +} + + +lp.negativeExamples = { +"http://www.benchmark.org/family#F7M123" +,"http://www.benchmark.org/family#F6M80" +,"http://www.benchmark.org/family#F2M37" +,"http://www.benchmark.org/family#F9M167" +,"http://www.benchmark.org/family#F10F189" +,"http://www.benchmark.org/family#F5M64" +,"http://www.benchmark.org/family#F6M90" +,"http://www.benchmark.org/family#F2M39" +,"http://www.benchmark.org/family#F3M43" +,"http://www.benchmark.org/family#F7M117" +,"http://www.benchmark.org/family#F2M32" +,"http://www.benchmark.org/family#F7F114" +,"http://www.benchmark.org/family#F10M190" +,"http://www.benchmark.org/family#F2M13" +,"http://www.benchmark.org/family#F6M73" 
+,"http://www.benchmark.org/family#F3M44" +,"http://www.benchmark.org/family#F6M95" +,"http://www.benchmark.org/family#F9F163" +,"http://www.benchmark.org/family#F10F198" +,"http://www.benchmark.org/family#F2M25" +,"http://www.benchmark.org/family#F2M18" +,"http://www.benchmark.org/family#F8M132" +,"http://www.benchmark.org/family#F1M1" +,"http://www.benchmark.org/family#F6F76" +,"http://www.benchmark.org/family#F10M184" +,"http://www.benchmark.org/family#F3F41" +,"http://www.benchmark.org/family#F7M107" +,"http://www.benchmark.org/family#F9F160" +,"http://www.benchmark.org/family#F2M9" +,"http://www.benchmark.org/family#F6M71" +,"http://www.benchmark.org/family#F8F133" +,"http://www.benchmark.org/family#F1M6" +,"http://www.benchmark.org/family#F10M176" +,"http://www.benchmark.org/family#F4F55" +,"http://www.benchmark.org/family#F9M157" +,"http://www.benchmark.org/family#F9M151" +,"http://www.benchmark.org/family#F6M98" +,"http://www.benchmark.org/family#F4M54" +,"http://www.benchmark.org/family#F9M170" +,"http://www.benchmark.org/family#F6F70" +,"http://www.benchmark.org/family#F10M171" +,"http://www.benchmark.org/family#F10M199" +,"http://www.benchmark.org/family#F8M136" +,"http://www.benchmark.org/family#F4M59" +,"http://www.benchmark.org/family#F6M88" +,"http://www.benchmark.org/family#F9M162" +,"http://www.benchmark.org/family#F10M197" +,"http://www.benchmark.org/family#F7M131" +,"http://www.benchmark.org/family#F7M113" +,"http://www.benchmark.org/family#F9M149" +,"http://www.benchmark.org/family#F7M109" +,"http://www.benchmark.org/family#F10M202" +} \ No newline at end of file Added: trunk/examples/family-benchmark/Father_padcel.conf =================================================================== --- trunk/examples/family-benchmark/Father_padcel.conf (rev 0) +++ trunk/examples/family-benchmark/Father_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,148 @@ + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false 
+cli.performCrossValidation = true +cli.nrOfFolds = 10 + +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "120" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"http://www.benchmark.org/family#F1M4" +,"http://www.benchmark.org/family#F1M1" +,"http://www.benchmark.org/family#F2M16" +,"http://www.benchmark.org/family#F2M11" +,"http://www.benchmark.org/family#F2M23" +,"http://www.benchmark.org/family#F2M21" +,"http://www.benchmark.org/family#F2M20" +,"http://www.benchmark.org/family#F2M34" +,"http://www.benchmark.org/family#F2M31" +,"http://www.benchmark.org/family#F2M29" +,"http://www.benchmark.org/family#F2M37" +,"http://www.benchmark.org/family#F2M9" +,"http://www.benchmark.org/family#F3M50" +,"http://www.benchmark.org/family#F3M47" +,"http://www.benchmark.org/family#F3M45" +,"http://www.benchmark.org/family#F3M43" +,"http://www.benchmark.org/family#F3M40" +,"http://www.benchmark.org/family#F4M57" +,"http://www.benchmark.org/family#F4M54" +,"http://www.benchmark.org/family#F5M66" +,"http://www.benchmark.org/family#F5M64" +,"http://www.benchmark.org/family#F5M60" +,"http://www.benchmark.org/family#F6M73" +,"http://www.benchmark.org/family#F6M71" +,"http://www.benchmark.org/family#F6M81" +,"http://www.benchmark.org/family#F6M78" +,"http://www.benchmark.org/family#F6M88" +,"http://www.benchmark.org/family#F6M85" +,"http://www.benchmark.org/family#F6M98" +,"http://www.benchmark.org/family#F6M95" +,"http://www.benchmark.org/family#F6M92" +,"http://www.benchmark.org/family#F6M69" +,"http://www.benchmark.org/family#F7M110" +,"http://www.benchmark.org/family#F7M109" 
+,"http://www.benchmark.org/family#F7M107" +,"http://www.benchmark.org/family#F7M115" +,"http://www.benchmark.org/family#F7M128" +,"http://www.benchmark.org/family#F7M123" +,"http://www.benchmark.org/family#F7M122" +,"http://www.benchmark.org/family#F7M104" +,"http://www.benchmark.org/family#F7M102" +,"http://www.benchmark.org/family#F8M136" +,"http://www.benchmark.org/family#F8M134" +,"http://www.benchmark.org/family#F8M132" +,"http://www.benchmark.org/family#F9M146" +,"http://www.benchmark.org/family#F9M144" +,"http://www.benchmark.org/family#F9M153" +,"http://www.benchmark.org/family#F9M142" +,"http://www.benchmark.org/family#F9M159" +,"http://www.benchmark.org/family#F9M162" +,"http://www.benchmark.org/family#F9M157" +,"http://www.benchmark.org/family#F9M167" +,"http://www.benchmark.org/family#F9M139" +,"http://www.benchmark.org/family#F10M173" +,"http://www.benchmark.org/family#F10M182" +,"http://www.benchmark.org/family#F10M188" +,"http://www.benchmark.org/family#F10M187" +,"http://www.benchmark.org/family#F10M197" +,"http://www.benchmark.org/family#F10M196" +,"http://www.benchmark.org/family#F10M171" +} + +lp.negativeExamples = { +"http://www.benchmark.org/family#F5M63" +,"http://www.benchmark.org/family#F10M199" +,"http://www.benchmark.org/family#F10M176" +,"http://www.benchmark.org/family#F6F86" +,"http://www.benchmark.org/family#F9M161" +,"http://www.benchmark.org/family#F3F53" +,"http://www.benchmark.org/family#F10F195" +,"http://www.benchmark.org/family#F5F61" +,"http://www.benchmark.org/family#F7F103" +,"http://www.benchmark.org/family#F6F77" +,"http://www.benchmark.org/family#F9F156" +,"http://www.benchmark.org/family#F9M155" +,"http://www.benchmark.org/family#F6F70" +,"http://www.benchmark.org/family#F3F46" +,"http://www.benchmark.org/family#F6F79" +,"http://www.benchmark.org/family#F2M27" +,"http://www.benchmark.org/family#F7F108" +,"http://www.benchmark.org/family#F10F189" +,"http://www.benchmark.org/family#F7M113" 
+,"http://www.benchmark.org/family#F7F111" +,"http://www.benchmark.org/family#F10M190" +,"http://www.benchmark.org/family#F8M138" +,"http://www.benchmark.org/family#F9M165" +,"http://www.benchmark.org/family#F3M44" +,"http://www.benchmark.org/family#F9F160" +,"http://www.benchmark.org/family#F6F74" +,"http://www.benchmark.org/family#F10F198" +,"http://www.benchmark.org/family#F5F62" +,"http://www.benchmark.org/family#F10F175" +,"http://www.benchmark.org/family#F10F201" +,"http://www.benchmark.org/family#F6F89" +,"http://www.benchmark.org/family#F9F143" +,"http://www.benchmark.org/family#F9M170" +,"http://www.benchmark.org/family#F6M90" +,"http://www.benchmark.org/family#F9M149" +,"http://www.benchmark.org/family#F3F52" +,"http://www.benchmark.org/family#F10F181" +,"http://www.benchmark.org/family#F9F163" +,"http://www.benchmark.org/family#F9F169" +,"http://www.benchmark.org/family#F9F158" +,"http://www.benchmark.org/family#F10F179" +,"http://www.benchmark.org/family#F4F55" +,"http://www.benchmark.org/family#F2F15" +,"http://www.benchmark.org/family#F5M68" +,"http://www.benchmark.org/family#F6F93" +,"http://www.benchmark.org/family#F10F192" +,"http://www.benchmark.org/family#F7M120" +,"http://www.benchmark.org/family#F7M131" +,"http://www.benchmark.org/family#F8F133" +,"http://www.benchmark.org/family#F7M125" +,"http://www.benchmark.org/family#F7M130" +,"http://www.benchmark.org/family#F2M18" +,"http://www.benchmark.org/family#F10F191" +,"http://www.benchmark.org/family#F9F152" +,"http://www.benchmark.org/family#F9F154" +,"http://www.benchmark.org/family#F2F22" +,"http://www.benchmark.org/family#F6F97" +,"http://www.benchmark.org/family#F7F127" +,"http://www.benchmark.org/family#F2F36" +,"http://www.benchmark.org/family#F5F67" +} \ No newline at end of file Added: trunk/examples/family-benchmark/Grandson_padcel.conf =================================================================== --- trunk/examples/family-benchmark/Grandson_padcel.conf (rev 0) +++ 
trunk/examples/family-benchmark/Grandson_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,114 @@ + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "120" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"http://www.benchmark.org/family#F2M13" +,"http://www.benchmark.org/family#F2M18" +,"http://www.benchmark.org/family#F2M25" +,"http://www.benchmark.org/family#F2M23" +,"http://www.benchmark.org/family#F2M21" +,"http://www.benchmark.org/family#F2M32" +,"http://www.benchmark.org/family#F2M35" +,"http://www.benchmark.org/family#F3M44" +,"http://www.benchmark.org/family#F3M51" +,"http://www.benchmark.org/family#F3M47" +,"http://www.benchmark.org/family#F3M45" +,"http://www.benchmark.org/family#F5M68" +,"http://www.benchmark.org/family#F5M66" +,"http://www.benchmark.org/family#F6M75" +,"http://www.benchmark.org/family#F6M73" +,"http://www.benchmark.org/family#F6M81" +,"http://www.benchmark.org/family#F6M90" +,"http://www.benchmark.org/family#F6M99" +,"http://www.benchmark.org/family#F6M100" +,"http://www.benchmark.org/family#F7M112" +,"http://www.benchmark.org/family#F7M110" +,"http://www.benchmark.org/family#F7M113" +,"http://www.benchmark.org/family#F7M117" +,"http://www.benchmark.org/family#F7M115" +,"http://www.benchmark.org/family#F7M125" +,"http://www.benchmark.org/family#F7M123" +,"http://www.benchmark.org/family#F7M131" +,"http://www.benchmark.org/family#F8M138" +,"http://www.benchmark.org/family#F8M136" 
+,"http://www.benchmark.org/family#F9M147" +,"http://www.benchmark.org/family#F9M151" +,"http://www.benchmark.org/family#F9M155" +,"http://www.benchmark.org/family#F9M153" +,"http://www.benchmark.org/family#F9M161" +,"http://www.benchmark.org/family#F9M159" +,"http://www.benchmark.org/family#F9M166" +,"http://www.benchmark.org/family#F9M162" +,"http://www.benchmark.org/family#F10M183" +,"http://www.benchmark.org/family#F10M184" +,"http://www.benchmark.org/family#F10M190" +,"http://www.benchmark.org/family#F10M188" +,"http://www.benchmark.org/family#F10M199" +,"http://www.benchmark.org/family#F10M197" +} + +lp.negativeExamples = { +"http://www.benchmark.org/family#F6F83" +,"http://www.benchmark.org/family#F4M57" +,"http://www.benchmark.org/family#F1M8" +,"http://www.benchmark.org/family#F9F140" +,"http://www.benchmark.org/family#F4F58" +,"http://www.benchmark.org/family#F2M29" +,"http://www.benchmark.org/family#F9M170" +,"http://www.benchmark.org/family#F7F118" +,"http://www.benchmark.org/family#F2F19" +,"http://www.benchmark.org/family#F2M16" +,"http://www.benchmark.org/family#F2M34" +,"http://www.benchmark.org/family#F10M182" +,"http://www.benchmark.org/family#F7M120" +,"http://www.benchmark.org/family#F8M134" +,"http://www.benchmark.org/family#F6F74" +,"http://www.benchmark.org/family#F10F192" +,"http://www.benchmark.org/family#F6F86" +,"http://www.benchmark.org/family#F2F28" +,"http://www.benchmark.org/family#F9M139" +,"http://www.benchmark.org/family#F10M194" +,"http://www.benchmark.org/family#F1F5" +,"http://www.benchmark.org/family#F4F56" +,"http://www.benchmark.org/family#F6F89" +,"http://www.benchmark.org/family#F2F33" +,"http://www.benchmark.org/family#F10F174" +,"http://www.benchmark.org/family#F7M128" +,"http://www.benchmark.org/family#F7F129" +,"http://www.benchmark.org/family#F9F158" +,"http://www.benchmark.org/family#F3M50" +,"http://www.benchmark.org/family#F6F94" +,"http://www.benchmark.org/family#F7F114" +,"http://www.benchmark.org/family#F6F72" 
+,"http://www.benchmark.org/family#F7F124" +,"http://www.benchmark.org/family#F9F150" +,"http://www.benchmark.org/family#F4F55" +,"http://www.benchmark.org/family#F10F175" +,"http://www.benchmark.org/family#F1F7" +,"http://www.benchmark.org/family#F4M54" +,"http://www.benchmark.org/family#F7F108" +,"http://www.benchmark.org/family#F6M92" +,"http://www.benchmark.org/family#F9F152" +,"http://www.benchmark.org/family#F6M85" +,"http://www.benchmark.org/family#F2F36" +} Added: trunk/examples/family-benchmark/Uncle_padcel.conf =================================================================== --- trunk/examples/family-benchmark/Uncle_padcel.conf (rev 0) +++ trunk/examples/family-benchmark/Uncle_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,104 @@ + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "120" +alg.maxNoOfSplits = "40" +alg.splitter = splitter + +splitter.type = "org.dllearner.algorithms.PADCEL.split.PADCELDoubleSplitterV1" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"http://www.benchmark.org/family#F2M13" +,"http://www.benchmark.org/family#F2M11" +,"http://www.benchmark.org/family#F2M20" +,"http://www.benchmark.org/family#F2M27" +,"http://www.benchmark.org/family#F2M32" +,"http://www.benchmark.org/family#F2M29" +,"http://www.benchmark.org/family#F2M37" +,"http://www.benchmark.org/family#F3M44" +,"http://www.benchmark.org/family#F5M63" +,"http://www.benchmark.org/family#F6M71" +,"http://www.benchmark.org/family#F6M80" +,"http://www.benchmark.org/family#F6M78" +,"http://www.benchmark.org/family#F6M90" +,"http://www.benchmark.org/family#F6M85" 
+,"http://www.benchmark.org/family#F6M100" +,"http://www.benchmark.org/family#F6M92" +,"http://www.benchmark.org/family#F7M113" +,"http://www.benchmark.org/family#F7M107" +,"http://www.benchmark.org/family#F7M115" +,"http://www.benchmark.org/family#F7M120" +,"http://www.benchmark.org/family#F7M125" +,"http://www.benchmark.org/family#F7M131" +,"http://www.benchmark.org/family#F7M122" +,"http://www.benchmark.org/family#F9M149" +,"http://www.benchmark.org/family#F9M144" +,"http://www.benchmark.org/family#F9M151" +,"http://www.benchmark.org/family#F9M153" +,"http://www.benchmark.org/family#F9M142" +,"http://www.benchmark.org/family#F9M159" +,"http://www.benchmark.org/family#F9M162" +,"http://www.benchmark.org/family#F9M157" +,"http://www.benchmark.org/family#F9M167" +,"http://www.benchmark.org/family#F10M173" +,"http://www.benchmark.org/family#F10M180" +,"http://www.benchmark.org/family#F10M182" +,"http://www.benchmark.org/family#F10M194" +,"http://www.benchmark.org/family#F10M187" +,"http://www.benchmark.org/family#F10M196" +} + +lp.negativeExamples = { +"http://www.benchmark.org/family#F10F198" +,"http://www.benchmark.org/family#F7F108" +,"http://www.benchmark.org/family#F9M165" +,"http://www.benchmark.org/family#F6F82" +,"http://www.benchmark.org/family#F9F148" +,"http://www.benchmark.org/family#F3M43" +,"http://www.benchmark.org/family#F7F103" +,"http://www.benchmark.org/family#F10M188" +,"http://www.benchmark.org/family#F1F3" +,"http://www.benchmark.org/family#F9F156" +,"http://www.benchmark.org/family#F9M147" +,"http://www.benchmark.org/family#F10F191" +,"http://www.benchmark.org/family#F9F160" +,"http://www.benchmark.org/family#F6M95" +,"http://www.benchmark.org/family#F2F14" +,"http://www.benchmark.org/family#F6F94" +,"http://www.benchmark.org/family#F1F2" +,"http://www.benchmark.org/family#F6F86" +,"http://www.benchmark.org/family#F10F174" +,"http://www.benchmark.org/family#F2F12" +,"http://www.benchmark.org/family#F2F28" 
+,"http://www.benchmark.org/family#F5M60" +,"http://www.benchmark.org/family#F8M134" +,"http://www.benchmark.org/family#F7M117" +,"http://www.benchmark.org/family#F10F189" +,"http://www.benchmark.org/family#F4F55" +,"http://www.benchmark.org/family#F6F76" +,"http://www.benchmark.org/family#F7F119" +,"http://www.benchmark.org/family#F2F36" +,"http://www.benchmark.org/family#F2M9" +,"http://www.benchmark.org/family#F2F38" +,"http://www.benchmark.org/family#F2F22" +,"http://www.benchmark.org/family#F6F89" +,"http://www.benchmark.org/family#F5M64" +,"http://www.benchmark.org/family#F5F67" +,"http://www.benchmark.org/family#F3F53" +,"http://www.benchmark.org/family#F2F26" +,"http://www.benchmark.org/family#F5F65" +} \ No newline at end of file Modified: trunk/examples/forte/uncle_owl_large.conf =================================================================== --- trunk/examples/forte/uncle_owl_large.conf 2012-05-13 17:14:12 UTC (rev 3706) +++ trunk/examples/forte/uncle_owl_large.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -9,6 +9,11 @@ prefixes = [ ("kb","http://localhost/foo#") ] +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + // knowledge source definition ks.type = "OWL File" ks.fileName = "forte_family.owl" Added: trunk/examples/forte/uncle_owl_large_padcel.conf =================================================================== --- trunk/examples/forte/uncle_owl_large_padcel.conf (rev 0) +++ trunk/examples/forte/uncle_owl_large_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,124 @@ +/** + * See uncle.conf. This is the same learning problem, but loading background + * knowledge from an OWL file instead. + * + * Copyright (C) 2007, Jens Lehmann + * Modified by An C. 
Tran + */ + + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + + +/** background knowledge **/ +prefixes = [ ("kb","http://localhost/foo#") ] + +// knowledge source definition +ks.type = "OWL File" +ks.fileName = "forte_family.owl" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + + +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "300" + + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"kb:art", +"kb:calvin", +"kb:carlos", +"kb:david", +"kb:eric", +"kb:fred", +"kb:frederick", +"kb:george", +"kb:harry", +"kb:jack", +"kb:james", +"kb:jonas", +"kb:karl", +"kb:leon", +"kb:mark", +"kb:melvin", +"kb:neil", +"kb:nero", +"kb:owen", +"kb:paul", +"kb:peter", +"kb:umo", +"kb:walt" +} +lp.negativeExamples = { +"kb:alfred", +"kb:alice", +"kb:angela", +"kb:ann", +"kb:beatrice", +"kb:bob", +"kb:callie", +"kb:carl", +"kb:christy", +"kb:cornelia", +"kb:deanna", +"kb:elisa", +"kb:f12", +"kb:f14", +"kb:f19", +"kb:f2", +"kb:f20", +"kb:f21", +"kb:f22", +"kb:f23", +"kb:f25", +"kb:f26", +"kb:f28", +"kb:f8", +"kb:fannie", +"kb:gail", +"kb:helen", +"kb:jane", +"kb:janet", +"kb:kari", +"kb:lorrie", +"kb:m1", +"kb:m10", +"kb:m11", +"kb:m13", +"kb:m15", +"kb:m16", +"kb:m17", +"kb:m18", +"kb:m24", +"kb:m27", +"kb:m29", +"kb:m3", +"kb:m4", +"kb:m5", +"kb:m6", +"kb:m7", +"kb:m9", +"kb:maria", +"kb:martha", +"kb:nancy", +"kb:nonnie", +"kb:oma", +"kb:paula", +"kb:prissie", +"kb:rachel", +"kb:ray", +"kb:regina", +"kb:steve", +"kb:susan", +"kb:terri", +"kb:terry", +"kb:wendy" +} Added: trunk/examples/moral_reasoner/moral_all_examples_simple_owl_padcel.conf =================================================================== --- trunk/examples/moral_reasoner/moral_all_examples_simple_owl_padcel.conf (rev 0) +++ 
trunk/examples/moral_reasoner/moral_all_examples_simple_owl_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,241 @@ + /*********************** + solution should be: + guilty = (blameworthy OR vicarious_blame ). + + + Examples: + 102 positive + 100 negative + + ***********************/ +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + + +prefixes = [ ("kb","http://localhost/foo#") ] + +// knowledge source definition +ks.type = "OWL file" +ks.fileName = "moral.owl" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +alg.numberOfWorkers = "8" +alg.maxExecutionTimeInSeconds = "300" +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" +alg.ignoredConcepts = {"kb:guilty"} + + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"kb:p0", +"kb:p1", +"kb:p2", +"kb:p3", +"kb:p4", +"kb:p5", +"kb:p6", +"kb:p7", +"kb:p8", +"kb:p9", +"kb:p10", +"kb:p11", +"kb:p12", +"kb:p13", +"kb:p14", +"kb:p15", +"kb:p16", +"kb:p17", +"kb:p18", +"kb:p19", +"kb:p20", +"kb:p21", +"kb:p22", +"kb:p23", +"kb:p24", +"kb:p25", +"kb:p26", +"kb:p27", +"kb:p28", +"kb:p29", +"kb:p30", +"kb:p31", +"kb:p32", +"kb:p33", +"kb:p34", +"kb:p35", +"kb:p36", +"kb:p37", +"kb:p38", +"kb:p39", +"kb:p40", +"kb:p41", +"kb:p42", +"kb:p43", +"kb:p44", +"kb:p45", +"kb:p46", +"kb:p47", +"kb:p48", +"kb:p49", +"kb:p50", +"kb:p51", +"kb:p52", +"kb:p53", +"kb:p54", +"kb:p55", +"kb:p56", +"kb:p57", +"kb:p58", +"kb:p59", +"kb:p60", +"kb:p61", +"kb:p62", +"kb:p63", +"kb:p64", +"kb:p65", +"kb:p66", +"kb:p67", +"kb:p68", +"kb:p69", +"kb:p70", +"kb:p71", +"kb:p72", +"kb:p73", +"kb:p74", +"kb:p75", +"kb:p76", +"kb:p77", +"kb:p78", +"kb:p79", +"kb:p80", +"kb:p81", +"kb:p82", +"kb:p83", +"kb:p84", +"kb:p85", +"kb:p86", +"kb:p87", +"kb:p88", +"kb:p89", +"kb:p90", +"kb:p91", +"kb:p92", +"kb:p93", +"kb:p94", +"kb:p95", +"kb:p96", +"kb:p97", +"kb:p98", 
+"kb:p99", +"kb:p100", +"kb:p101" +} +lp.negativeExamples = { +"kb:n0", +"kb:n1", +"kb:n2", +"kb:n3", +"kb:n4", +"kb:n5", +"kb:n6", +"kb:n7", +"kb:n8", +"kb:n9", +"kb:n10", +"kb:n11", +"kb:n12", +"kb:n13", +"kb:n14", +"kb:n15", +"kb:n16", +"kb:n17", +"kb:n18", +"kb:n19", +"kb:n20", +"kb:n21", +"kb:n22", +"kb:n23", +"kb:n24", +"kb:n25", +"kb:n26", +"kb:n27", +"kb:n28", +"kb:n29", +"kb:n30", +"kb:n31", +"kb:n32", +"kb:n33", +"kb:n34", +"kb:n35", +"kb:n36", +"kb:n37", +"kb:n38", +"kb:n39", +"kb:n40", +"kb:n41", +"kb:n42", +"kb:n43", +"kb:n44", +"kb:n45", +"kb:n46", +"kb:n47", +"kb:n48", +"kb:n49", +"kb:n50", +"kb:n51", +"kb:n52", +"kb:n53", +"kb:n54", +"kb:n55", +"kb:n56", +"kb:n57", +"kb:n58", +"kb:n59", +"kb:n60", +"kb:n61", +"kb:n62", +"kb:n63", +"kb:n64", +"kb:n65", +"kb:n66", +"kb:n67", +"kb:n68", +"kb:n69", +"kb:n70", +"kb:n71", +"kb:n72", +"kb:n73", +"kb:n74", +"kb:n75", +"kb:n76", +"kb:n77", +"kb:n78", +"kb:n79", +"kb:n80", +"kb:n81", +"kb:n82", +"kb:n83", +"kb:n84", +"kb:n85", +"kb:n86", +"kb:n87", +"kb:n88", +"kb:n89", +"kb:n90", +"kb:n91", +"kb:n92", +"kb:n93", +"kb:n94", +"kb:n95", +"kb:n96", +"kb:n97", +"kb:n98", +"kb:n99" +} + \ No newline at end of file Added: trunk/examples/poker/straight_owl_padcel.conf =================================================================== --- trunk/examples/poker/straight_owl_padcel.conf (rev 0) +++ trunk/examples/poker/straight_owl_padcel.conf 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,92 @@ +/** + * See straight.conf. This is the same learning problem, but loading + * background knowledge from an OWL file instead. + * + * Copyright (C) 2007, Jens Lehmann + * Modified: An C. 
Tran + */ + +cli.type = "org.dllearner.cli.CLI" +cli.writeSpringConfiguration = false +cli.performCrossValidation = true +cli.nrOfFolds = 10 + + + +prefixes = [ ("kb","http://localhost/foo#") ] + +// knowledge source definition +ks.type = "OWL File" +ks.fileName = "straight.owl" + +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + + +alg.numberOfWorkers = "4" +alg.maxExecutionTimeInSeconds = "300" +alg.type = "org.dllearner.algorithms.PADCEL.PADCELearner" + +// learning problem +lp.type = "org.dllearner.algorithms.PADCEL.PADCELPosNegLP" +lp.positiveExamples = { +"kb:hand1", +"kb:hand22", +"kb:hand40", +"kb:hand44" +} + +lp.negativeExamples = { +"kb:hand0", +"kb:hand2", +"kb:hand3", +"kb:hand4", +"kb:hand5", +"kb:hand6", +"kb:hand7", +"kb:hand8", +"kb:hand9", +"kb:hand10", +"kb:hand11", +"kb:hand12", +"kb:hand13", +"kb:hand14", +"kb:hand15", +"kb:hand16", +"kb:hand17", +"kb:hand18", +"kb:hand19", +"kb:hand20", +"kb:hand21", +"kb:hand23", +"kb:hand24", +"kb:hand25", +"kb:hand26", +"kb:hand27", +"kb:hand28", +"kb:hand29", +"kb:hand30", +"kb:hand31", +"kb:hand32", +"kb:hand33", +"kb:hand34", +"kb:hand35", +"kb:hand36", +"kb:hand37", +"kb:hand38", +"kb:hand39", +"kb:hand41", +"kb:hand42", +"kb:hand43", +"kb:hand45", +"kb:hand46", +"kb:hand47", +"kb:hand48", +"kb:hand49", +"kb:hand50", +"kb:hand51", +"kb:hand52", +"kb:hand53", +"kb:hand54" +} Added: trunk/examples/showering-duration/abd.muse.massey.ac.nz.owl =================================================================== --- trunk/examples/showering-duration/abd.muse.massey.ac.nz.owl (rev 0) +++ trunk/examples/showering-duration/abd.muse.massey.ac.nz.owl 2012-05-14 03:55:41 UTC (rev 3707) @@ -0,0 +1,1228 @@ +<?xml version="1.0"?> + + +<!DOCTYPE rdf:RDF [ + <!ENTITY abd "http://abd.muse.massey.ac.nz#" > + <!ENTITY owl "http://www.w3.org/2002/07/owl#" > + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > + <!ENTITY owl2xml "http://www.w3.org/2006/12/owl2-xml#" > + <!ENTITY rdfs 
"http://www.w3.org/2000/01/rdf-schema#" > + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > +]> + + +<rdf:RDF xmlns="http://abd.muse.massey.ac.nz#" + xml:base="http://abd.muse.massey.ac.nz" + xmlns:owl2xml="http://www.w3.org/2006/12/owl2-xml#" + xmlns:abd="http://abd.muse.massey.ac.nz#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:owl="http://www.w3.org/2002/07/owl#"> + <owl:Ontology rdf:about="http://abd.muse.massey.ac.nz"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Annotation properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Datatypes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Object Properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://abd.muse.massey.ac.nz#activityHasDuration --> + + <owl:ObjectProperty rdf:about="&abd;activityHasDuration"> + <rdfs:domain rdf:resource="&abd;Activity"/> + <rdfs:range rdf:resource="&abd;Duration"/> + </owl:ObjectProperty> + + + + <!-- http://abd.muse.massey.ac.nz#activityHasOtherContext --> + + <owl:ObjectProperty rdf:about="&abd;activityHasOtherContext"> + <rdfs:domain rdf:resource="&abd;Activity"/> + <rdfs:range rdf:resource="&abd;OtherContext"/> + </owl:ObjectProperty> + + + + <!-- http://abd.muse.massey.ac.nz#activityHasStarttime --> + + <owl:ObjectProperty rdf:about="&abd;activityHasStarttime"> + <rdfs:domain rdf:resource="&abd;Activity"/> + <rdfs:range rdf:resource="&abd;Timepoint"/> + 
</owl:ObjectProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasTemperature --> + + <owl:ObjectProperty rdf:about="&abd;hasTemperature"> + <rdfs:range rdf:resource="&abd;Temperature"/> + <rdfs:domain rdf:resource="&abd;Timepoint"/> + </owl:ObjectProperty> + + + + <!-- http://abd.muse.massey.ac.nz#placedAt --> + + <owl:ObjectProperty rdf:about="&abd;placedAt"> + <rdfs:domain rdf:resource="&abd;Furniture"/> + <rdfs:range rdf:resource="&abd;Location"/> + </owl:ObjectProperty> + + + + <!-- http://abd.muse.massey.ac.nz#takeActivity --> + + <owl:ObjectProperty rdf:about="&abd;takeActivity"> + <rdfs:range rdf:resource="&abd;Activity"/> + <rdfs:domain rdf:resource="&abd;People"/> + </owl:ObjectProperty> + + + + <!-- http://abd.muse.massey.ac.nz#takenAt --> + + <owl:ObjectProperty rdf:about="&abd;takenAt"> + <rdfs:domain rdf:resource="&abd;Activity"/> + <rdfs:range rdf:resource="&abd;Location"/> + </owl:ObjectProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Data properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://abd.muse.massey.ac.nz#hasDayValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasDayValue"> + <rdfs:domain rdf:resource="&abd;Timepoint"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasDisturbedNightValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasDisturbedNightValue"> + <rdfs:domain rdf:resource="&abd;DisturbedNight"/> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasDurationValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasDurationValue"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:domain rdf:resource="&abd;Duration"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasHourValue --> + 
+ <owl:DatatypeProperty rdf:about="&abd;hasHourValue"> + <rdfs:domain rdf:resource="&abd;Timepoint"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasMinuteValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasMinuteValue"> + <rdfs:domain rdf:resource="&abd;Timepoint"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasMonthValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasMonthValue"> + <rdfs:domain rdf:resource="&abd;Timepoint"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasScheduledValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasScheduledValue"> + <rdfs:domain rdf:resource="&abd;Scheduled"/> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasSecondValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasSecondValue"> + <rdfs:domain rdf:resource="&abd;Timepoint"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasTemperatureValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasTemperatureValue"> + <rdfs:domain rdf:resource="&abd;Temperature"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- http://abd.muse.massey.ac.nz#hasYearValue --> + + <owl:DatatypeProperty rdf:about="&abd;hasYearValue"> + <rdfs:domain rdf:resource="&abd;Timepoint"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Classes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://abd.muse.massey.ac.nz#AbnormalActivity --> + + <owl:Class rdf:about="&abd;AbnormalActivity"> + <rdfs:subClassOf rdf:resource="&abd;Activity"/> + 
<owl:disjointWith rdf:resource="&abd;NormalActivity"/> + </owl:Class> + + + + <!-- http://abd.muse.massey.ac.nz#Activity --> + + <owl:Class rdf:about="&abd;Activity"/> + + + + <!-- http://abd.muse.massey.ac.nz#Afternoon --> + + <owl:Class rdf:about="&abd;Afternoon"> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&abd;Timepoint"/> + <owl:Restriction> + <owl:onProperty rdf:resource="&abd;hasHourValue"/> + <owl:someValuesFrom> + <rdfs:Datatype> + <owl:onDatatype rdf:resource="&xsd;double"/> + <owl:withRestrictions rdf:parseType="Collection"> + <rdf:Description> + <xsd:minInclusive rdf:datatype="&xsd;double">12</xsd:minInclusive> + </rdf:Description> + </owl:withRestrictions> + </rdfs:Datatype> + </owl:someValuesFrom> + </owl:Restriction> + <owl:Restriction> + <owl:onProperty rdf:resource="&abd;hasHourValue"/> + <owl:someValuesFrom> + <rdfs:Datatype> + <owl:onDatatype rdf:resource="&xsd;double"/> + <owl:withRestrictions rdf:parseType="Collection"> + <rdf:Description> + <xsd:maxExclusive rdf:datatype="&xsd;double">18</xsd:maxExclusive> + </rdf:Description> + </owl:withRestrictions> + </rdfs:Datatype> + </owl:someValuesFrom> + </owl:Restriction> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <rdfs:subClassOf rdf:resource="&abd;QualitativeTimeOfDay"/> + </owl:Class> + + + + <!-- http://abd.muse.massey.ac.nz#Autumn --> + + <owl:Class rdf:about="&abd;Autumn"> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&abd;Timepoint"/> + <owl:Restriction> + <owl:onProperty rdf:resource="&abd;hasMonthValue"/> + <owl:someValuesFrom> + <rdfs:Datatype> + ... [truncated message content] |
From: <tc...@us...> - 2012-05-13 17:14:22
|
Revision: 3706 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3706&view=rev Author: tcanvn Date: 2012-05-13 17:14:12 +0000 (Sun, 13 May 2012) Log Message: ----------- 1. Add PADCEL (PArallel Divide & Conquer Expression Learning) algorithm with the new splitting strategy used in PADCEL 2. Add abstract class for PADCELEx (PADCEL with Exceptions) algorithm (for new CLI reference). The implementation of this algorithm is being checked and committed Added Paths: ----------- trunk/components-core/src/main/java/nz/ trunk/components-core/src/main/java/nz/ac/ trunk/components-core/src/main/java/nz/ac/massey/ trunk/components-core/src/main/java/nz/ac/massey/abd/ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELAbstract.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELComplenessComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCompletenessComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCorrectnessComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoverageGreedyReducer.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoveredNegativeExampleComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefaultHeuristic.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionGenerationTimeComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthReducer.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELEvaluationResult.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELExtraNode.java 
trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELGenerationTimeReducer.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELHeuristic.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELImprovedCovegareGreedyReducer.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELNode.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELOntologyUtil.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELPosNegLP.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELReducer.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELRefinementOperatorFactory.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELRefinementOperatorPool.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELScore.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELStringUtilities.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELTaskComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELWorker.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELWorkerThreadFactory.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELearnerMBean.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/split/ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/split/PADCELDoubleSplitterAbstract.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/split/PADCELDoubleSplitterV1.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/split/ValueCount.java trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/split/ValuesSet.java 
trunk/components-core/src/main/java/org/dllearner/algorithms/PADCELEx/ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCELEx/PADCELExAbstract.java Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELAbstract.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELAbstract.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELAbstract.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,74 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Set; +import java.util.SortedSet; +import java.util.concurrent.ConcurrentSkipListSet; + +import org.dllearner.core.AbstractCELA; +import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.owl.Description; + +/** + * Abstract class for all PDL-Learner algorithms family + * + * @author An C. Tran + * + */ +public abstract class PADCELAbstract extends AbstractCELA { + + /** + * All generated descriptions thread-safe set is used to avoid concurrently + * accessing + */ + protected ConcurrentSkipListSet<Description> allDescriptions = null; + + /** + * Search tree. It hold all evaluated descriptions that are not correct and + * not weak ==> candidate for partial definitions Nodes in the search tree + * must be ordered using heuristic so that it can help the searching more + * efficiently (best search rather than 'blind' breadth first of depth + * first) NOTE: node = (description + accuracy/correctness/completeness/... 
+ * value) + */ + protected ConcurrentSkipListSet<PADCELNode> searchTree = null; // thread safe + // set + + /** + * partial definitions (they should be sorted so that we can get the best + * partial definition at any time + */ + protected SortedSet<PADCELExtraNode> partialDefinitions = null; + + /** + * Default constructor + */ + public PADCELAbstract() { + super(); + } + + /** + * ======================================================================== + * Constructor for the learning algorithm + * + * @param learningProblem + * Must be a PDLLPosNegLP + * @param reasoningService + * A reasoner + */ + public PADCELAbstract(PADCELPosNegLP learningProblem, AbstractReasonerComponent reasoningService) { + super(learningProblem, reasoningService); + } + + public abstract Description getUnionCurrenlyBestDescription(); + + public abstract int getNoOfCompactedPartialDefinition(); + + public abstract Set<PADCELExtraNode> getPartialDefinitions(); + + public abstract double getCurrentlyOveralMaxCompleteness(); + + public long getNumberOfPartialDefinitions() { + return this.partialDefinitions.size(); + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELAbstract.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELComplenessComparator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELComplenessComparator.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELComplenessComparator.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,37 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Comparator; + +import org.dllearner.utilities.owl.ConceptComparator; + +/** + * Comparator for PDLLNode based on the completeness of the description If two + * nodes 
has the same completeness, description length will be taken into the + * consideration and finally the ConceptComparator + * + * @author An C. Tran + * + */ + +public class PADCELComplenessComparator implements Comparator<PADCELNode> { + + @Override + public int compare(PADCELNode node1, PADCELNode node2) { + int v1 = node1.getCoveredPositiveExamples().size(); + int v2 = node2.getCoveredPositiveExamples().size(); + if (v1 > v2) + return -1; + else if (v1 < v2) + return 1; + else { + if (node1.getDescription().getLength() < node2.getDescription().getLength()) + return -1; + else if (node1.getDescription().getLength() > node2.getDescription().getLength()) + return 1; + else + return new ConceptComparator().compare(node1.getDescription(), + node2.getDescription()); + } + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELComplenessComparator.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCompletenessComparator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCompletenessComparator.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCompletenessComparator.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,38 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Comparator; + +import org.dllearner.utilities.owl.ConceptComparator; + +/** + * Use to compare 2 ExtraPLOENode nodes based on their completeness (coverage). The description + * length and ConceptComparator will be used it they have equal coverage + * + * @author An C. 
Tran + * + */ +public class PADCELCompletenessComparator implements Comparator<PADCELExtraNode> { + + @Override + public int compare(PADCELExtraNode node1, PADCELExtraNode node2) { + + int v1 = node1.getCoveredPositiveExamples().size(); + int v2 = node2.getCoveredPositiveExamples().size(); + + if (v1 > v2) + return -1; + else if (v1 < v2) + return 1; + else { + int len1 = node1.getDescription().getLength(); + int len2 = node2.getDescription().getLength(); + if (len1 < len2) + return -1; + else if (len1 > len2) + return 1; + else + return new ConceptComparator().compare(node1.getDescription(), + node2.getDescription()); + } + } +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCompletenessComparator.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCorrectnessComparator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCorrectnessComparator.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCorrectnessComparator.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,38 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Comparator; + +import org.dllearner.utilities.owl.ConceptComparator; + +/** + * Use to compare 2 ExtraPLOENode nodes based on their correctness. The description length and + * ConceptComparator will be used it they have equal coverage + * + * @author An C. 
Tran + * + */ +public class PADCELCorrectnessComparator implements Comparator<PADCELExtraNode> { + + @Override + public int compare(PADCELExtraNode node1, PADCELExtraNode node2) { + double correctness1 = node1.getCorrectness(); + double correctness2 = node2.getCorrectness(); + + if (correctness1 > correctness2) + return -1; // smaller will be on the top + else if (correctness1 < correctness2) + return 1; + else { + int len1 = node1.getDescription().getLength(); + int len2 = node2.getDescription().getLength(); + + if (len1 < len2) + return -1; + else if (len1 > len2) + return 1; + else + return new ConceptComparator().compare(node1.getDescription(), + node2.getDescription()); + } + } +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCorrectnessComparator.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoverageGreedyReducer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoverageGreedyReducer.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoverageGreedyReducer.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,82 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.dllearner.core.owl.Individual; + +/** + * This class implements a simple strategy for compacting the partial definition set In this + * strategy, the partial definition will be chosen based on their accuracy. The partial definition + * with the best accuracy will be chosen first and the rests will not be re-calculated before the + * next reduction + * + * @author An C. 
Tran + * + */ +public class PADCELCoverageGreedyReducer implements PADCELReducer { + + /** + * Compact partial definitions + * + * @param partialDefinitions + * Set of partial definitions + * @param positiveExamples + * Set of positive examples (used to check whether partial definition is useful + * @param uncoveredPositiveExamples + * Number of uncovered positive examples allowed + * + * @return Subset of partial definitions that cover all positive examples + */ + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples) { + return compact(partialDefinitions, positiveExamples, 0); + } + + /** + * Compact partial definition with noise allowed + * + * @param partialDefinitions + * Set of partial definitions + * @param positiveExamples + * Set of positive examples (used to check whether partial definition is useful + * @param uncoveredPositiveExamples + * Number of uncovered positive examples allowed + * + * @return Subset of partial definitions that cover (positive examples \ uncovered positive + * examples) + */ + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples, int uncoveredPositiveExamples) { + + Set<Individual> positiveExamplesTmp = new HashSet<Individual>(); + positiveExamplesTmp.addAll(positiveExamples); + + TreeSet<PADCELExtraNode> minimisedPartialDefinition = new TreeSet<PADCELExtraNode>( + new PADCELCorrectnessComparator()); + + Iterator<PADCELExtraNode> partialDefinitionIterator = partialDefinitions.iterator(); + while ((positiveExamplesTmp.size() > uncoveredPositiveExamples) + && (partialDefinitionIterator.hasNext())) { + PADCELExtraNode node = partialDefinitionIterator.next(); + + int positiveExamplesRemoved = positiveExamplesTmp.size(); + positiveExamplesTmp.removeAll(node.getCoveredPositiveExamples()); + + positiveExamplesRemoved -= positiveExamplesTmp.size(); + + if 
(positiveExamplesRemoved > 0) { + node.setCorrectness(positiveExamplesRemoved); + minimisedPartialDefinition.add(node); + } + } + + return minimisedPartialDefinition; + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoverageGreedyReducer.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoveredNegativeExampleComparator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoveredNegativeExampleComparator.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoveredNegativeExampleComparator.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,38 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Comparator; + +import org.dllearner.utilities.owl.ConceptComparator; + +/** + * Use to compare 2 ExtraPLOENode nodes based on the number of covered negative examples. The + * description length and ConceptComparator will be used it they have equal coverage + * + * @author An C. 
Tran + * + */ +public class PADCELCoveredNegativeExampleComparator implements Comparator<PADCELExtraNode> { + + @Override + public int compare(PADCELExtraNode node1, PADCELExtraNode node2) { + int coveredNeg1 = node1.getCoveredNegativeExamples().size(); + int coveredNeg2 = node2.getCoveredPositiveExamples().size(); + + if (coveredNeg1 > coveredNeg2) + return -1; // smaller will be on the top + else if (coveredNeg1 < coveredNeg2) + return 1; + else { + int len1 = node1.getDescription().getLength(); + int len2 = node2.getDescription().getLength(); + + if (len1 < len2) + return -1; + else if (len1 > len2) + return 1; + else + return new ConceptComparator().compare(node1.getDescription(), + node2.getDescription()); + } + } +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELCoveredNegativeExampleComparator.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefaultHeuristic.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefaultHeuristic.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefaultHeuristic.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,95 @@ +package org.dllearner.algorithms.PADCEL; + +import org.dllearner.utilities.owl.ConceptComparator; + +/** + * Implements the heuristic used to expand the search tree. Dimensions used: <br> + * + correctness: main value<br> + * + horizontal expansion: penalty<br> + * + accuracy gained from the parent node: bonus<br> + * + refinement nodes: penalty<br> + * + concept type + name (org.dllearner.utilities.owl.ConceptComparator) + * + * @author An C. 
Tran + * + */ +public class PADCELDefaultHeuristic implements PADCELHeuristic { + + // penalty for long descriptions + protected double expansionPenaltyFactor = 0.1; + + // bonus for gained accuracy + protected double gainBonusFactor = 0.2; + + // penalty if a node description has very many refinements since exploring + // such a node is computationally very expensive + protected double nodeRefinementPenalty = 0.0001; + + // award for node with high accuracy + protected double accuracyAwardFactor = 0.01; + + // syntactic comparison as final comparison criterion + protected ConceptComparator conceptComparator = new ConceptComparator(); + + /** + * Compare two node + * + * @param node1 + * Node to compare + * @param node2 + * Node to compare + * + * @return 1 if node1 "greater" than node2 and vice versa + */ + public int compare(PADCELNode node1, PADCELNode node2) { + double diff = getNodeScore(node1) - getNodeScore(node2); + + if (diff > 0) { // node1 has better score than node2 + return 1; + } else if (diff < 0) { + return -1; + } else { + int comp = conceptComparator.compare(node1.getDescription(), node2.getDescription()); + + // this allows duplicate descriptions exists in the set + if (comp != 0) + return comp; + else + return -1; + + } + } + + /** + * Calculate score for a node which is used as the searching heuristic + * + * @param node + * Node to be scored + * + * @return Score of the node + */ + protected double getNodeScore(PADCELNode node) { + + // the scoring mainly bases on correctness + double score = node.getCorrectness(); + + // bonus for the accuracy gained + if (!node.isRoot()) { + double parentAccuracy = ((PADCELNode) (node.getParent())).getAccuracy(); + score += (parentAccuracy - node.getAccuracy()) * gainBonusFactor; + } + + // award node with high accuracy + score += node.getAccuracy() * accuracyAwardFactor; + + // penalty for horizontal expansion + score -= node.getHorizontalExpansion() * expansionPenaltyFactor; + // score -= 
node.getDescription().getLength() * expansionPenaltyFactor; + + return score; + } + + public double getScore(PADCELNode node) { + return this.getNodeScore(node); + } +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefaultHeuristic.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionGenerationTimeComparator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionGenerationTimeComparator.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionGenerationTimeComparator.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,37 @@ +package org.dllearner.algorithms.PADCEL; + +/** + * Compare two node based on their generation time. + * This will be used in the Generation Time Greedy Compactness strategy + * GOLR + * + * @author An C. 
Tran + */ + +import java.util.Comparator; + +import org.dllearner.utilities.owl.ConceptComparator; + +public class PADCELDefinitionGenerationTimeComparator implements Comparator<PADCELExtraNode> { + + @Override + public int compare(PADCELExtraNode node1, PADCELExtraNode node2) { + double genTime1 = node1.getGenerationTime(); + double genTime2 = node2.getGenerationTime(); + + if (genTime1 < genTime2) + return -1; + else if (genTime1 > genTime2) + return 1; + else { + if (node1.getDescription().getLength() < node2.getDescription().getLength()) + return -1; + else if (node1.getDescription().getLength() > node2.getDescription().getLength()) + return 1; + else + return new ConceptComparator().compare(node1.getDescription(), + node2.getDescription()); + } + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionGenerationTimeComparator.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthComparator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthComparator.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthComparator.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,29 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Comparator; + +import org.dllearner.utilities.owl.ConceptComparator; + +/** + * Compare two node based on their definition length. This will be used in the Definition Length + * Greedy Compactness strategy + * + * @author An C. 
Tran + * + */ + +public class PADCELDefinitionLengthComparator implements Comparator<PADCELExtraNode> { + + @Override + public int compare(PADCELExtraNode node1, PADCELExtraNode node2) { + int len1 = node1.getDescription().getLength(); + int len2 = node2.getDescription().getLength(); + if (len1 < len2) + return -1; + else if (len1 > len2) + return 1; + else + return new ConceptComparator().compare(node1.getDescription(), node2.getDescription()); + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthComparator.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthReducer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthReducer.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthReducer.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,59 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.dllearner.core.owl.Individual; + +/** + * Compact set of partial definitions using Definition Length Grredy Reduction strategy + * + * @author An C. 
Tran + * + */ +public class PADCELDefinitionLengthReducer implements PADCELReducer { + + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples) { + return compact(partialDefinitions, positiveExamples, 0); + } + + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples, int uncoveredPositiveExamples) { + Set<Individual> positiveExamplesTmp = new HashSet<Individual>(); + positiveExamplesTmp.addAll(positiveExamples); + + TreeSet<PADCELExtraNode> newSortedPartialDefinitions = new TreeSet<PADCELExtraNode>( + new PADCELDefinitionLengthComparator()); + synchronized (partialDefinitions) { + newSortedPartialDefinitions.addAll(partialDefinitions); + } + + TreeSet<PADCELExtraNode> minimisedPartialDefinition = new TreeSet<PADCELExtraNode>( + new PADCELDefinitionGenerationTimeComparator()); + + Iterator<PADCELExtraNode> partialDefinitionIterator = newSortedPartialDefinitions.iterator(); + while ((positiveExamplesTmp.size() > uncoveredPositiveExamples) + && (partialDefinitionIterator.hasNext())) { + PADCELExtraNode node = partialDefinitionIterator.next(); + + int positiveExamplesRemoved = positiveExamplesTmp.size(); + positiveExamplesTmp.removeAll(node.getCoveredPositiveExamples()); + + positiveExamplesRemoved -= positiveExamplesTmp.size(); + + if (positiveExamplesRemoved > 0) { + node.setCorrectness(positiveExamplesRemoved); + minimisedPartialDefinition.add(node); + } + } + + return minimisedPartialDefinition; + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELDefinitionLengthReducer.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELEvaluationResult.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELEvaluationResult.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELEvaluationResult.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,132 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Set; + +import org.dllearner.core.owl.Individual; + +/** + * This class represents the result of an evaluation result return from the PDLL learning problem + * (PDLLPosNegLP) <br> + * Information included in a PDLL evaluation includes: + * <ol> + * <li>- accuracy: double</li> + * <li>- correctness: double</li> + * <li>- completeness: double</li> + * <li>- covered positive examples: Set<Individual></li> + * <li>- covered negative examples: Set<Individual></li> + * </ol> + * + * @author An C. Tran + * + */ +public class PADCELEvaluationResult { + + protected double accuracy = -1; + protected double correctness = -1; + protected double completeness = -1; + protected Set<Individual> coveredPossitiveExamples = null; + protected Set<Individual> coveredNegativeExamples = null; + + /** + * Default constructor, use default value for all properties + */ + public PADCELEvaluationResult() { + + } + + /** + * This use to create an evaluation result that is not for a correct definition. 
So, we don't + * need to hold the set of covered positive examples + * + * @param accuracy + * @param correctness + */ + public PADCELEvaluationResult(double accuracy, double correctness, double completeness) { + this.accuracy = accuracy; + this.correctness = correctness; + this.completeness = completeness; + } + + /** + * Used to create an evaluation result for a correct definition.<br> + * Covered positive examples have to be kept to be used in the result compactness later on.<br> + * This is usually used in case of partial definition + * + * @param accuracy + * @param correctness + * @param completeness + * @param coveredPossitiveExamples + */ + public PADCELEvaluationResult(double accuracy, double correctness, double completeness, + Set<Individual> coveredPossitiveExamples) { + this.accuracy = accuracy; + this.correctness = correctness; + this.completeness = completeness; + this.coveredPossitiveExamples = coveredPossitiveExamples; + } + + /** + * Used to create an evaluation result for a correct definition.<br> + * Both covered positive examples and covered negative examples will be kept to be used in the + * result compactness later on. 
+ * + * @param accuracy + * @param correctness + * @param completeness + * @param coveredPositiveExamples + * @param coveredNegativeExamples + */ + public PADCELEvaluationResult(double accuracy, double correctness, double completeness, + Set<Individual> coveredPositiveExamples, Set<Individual> coveredNegativeExamples) { + + this.accuracy = accuracy; + this.correctness = correctness; + this.completeness = completeness; + this.coveredPossitiveExamples = coveredPositiveExamples; + this.coveredNegativeExamples = coveredNegativeExamples; + } + + // --------------------------------------- + // Getters and setters + // --------------------------------------- + public double getAccuracy() { + return accuracy; + } + + public void setAccuracy(double accuracy) { + this.accuracy = accuracy; + } + + public double getCorrectness() { + return correctness; + } + + public void setCorrectness(double correctness) { + this.correctness = correctness; + } + + public double getCompleteness() { + return completeness; + } + + public void setCompleteness(double completeness) { + this.completeness = completeness; + } + + public Set<Individual> getCoveredPossitiveExamples() { + return coveredPossitiveExamples; + } + + public void setCoveredPossitiveExamples(Set<Individual> coveredPossitiveExamples) { + this.coveredPossitiveExamples = coveredPossitiveExamples; + } + + public Set<Individual> getCoveredNegativeExamples() { + return coveredNegativeExamples; + } + + public void setCoveredNegativeExamples(Set<Individual> coveredNegativeExamples) { + this.coveredNegativeExamples = coveredNegativeExamples; + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELEvaluationResult.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELExtraNode.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELExtraNode.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELExtraNode.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,132 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.HashSet; +import java.util.Set; + +import org.dllearner.algorithms.celoe.OENode; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.Individual; + +/** + * Generation time and node type is added into PADCELNode. This information is necessary for some + * reduction algorithms + * + * @author An C. Tran + * + */ +public class PADCELExtraNode extends PADCELNode { + + private double generationTime = Double.MIN_VALUE; // time in ms that the node was generated + private int type = -1; + + Set<OENode> compositeNodes = new HashSet<OENode>(); + + /** + * ============================================================================================ + * Constructor with correctness of the description + * + * @param parentNode + * Parent node of this node + * @param description + * Description of the node + * @param accuracy + * Accuracy of the node + * @param distance + * Distance between the node and the learning problem + * @param correctness + * Correctness of the node + */ + public PADCELExtraNode(OENode parentNode, Description description, double accuracy, + double correctness) { + super(parentNode, description, accuracy, correctness); + } + + public PADCELExtraNode(OENode parentNode, Description description, double accuracy) { + super(parentNode, description, accuracy); + } + + public PADCELExtraNode(PADCELNode node) { + super(node.getParent(), node.getDescription(), node.getAccuracy(), node.getCorrectness()); + setCoveredPositiveExamples(node.getCoveredPositiveExamples()); + setCoveredNegativeExamples(node.getCoveredNegativeExamples()); + } + + public PADCELExtraNode(PADCELNode node, Set<Individual> cp, double generationTime) { + 
super(node.getParent(), node.getDescription(), node.getAccuracy(), node.getCorrectness()); + super.coveredPositiveExamples = cp; + this.generationTime = generationTime; + } + + /** + * ============================================================================================ + * Constructor with the correctness and the generation time of the description + * + * @param parentNode + * Parent node of this node + * @param description + * Description of the node + * @param accuracy + * Accuracy of the node + * @param distance + * Distance between the node and the learning problem + * @param correctness + * Correctness of the node + * @param genTime + * Time in ms that the work used to generate this node + */ + public PADCELExtraNode(PADCELNode parentNode, Description description, double accuracy, + double correctness, double genTime) { + super(parentNode, description, accuracy, correctness); + this.coveredPositiveExamples = null; + this.generationTime = genTime; + } + + /** + * ============================================================================================ + * Constructor with the set of positive examples covered by the description of the node + * + * @param parentNode + * @param description + * @param accuracy + * @param distance + * @param correctness + * @param cn + * Covered positive examples + */ + public PADCELExtraNode(PADCELNode parentNode, Description description, double accuracy, + double correctness, Set<Individual> cp) { + super(parentNode, description, accuracy, correctness); + super.setCoveredPositiveExamples(cp); + } + + // ------------------------- + // getters and setters + // ------------------------- + public void setCoveredPositiveExamples(Set<Individual> cpn) { + super.setCoveredPositiveExamples(cpn); + } + + public double getGenerationTime() { + return generationTime; + } + + public void setGenerationTime(double d) { + this.generationTime = d; + } + + public void setType(int t) { + this.type = t; + } + + public int getType() { + 
return this.type; + } + + public void setCompositeList(Set<PADCELExtraNode> compositeNodes) { + this.compositeNodes.addAll(compositeNodes); + } + + public Set<OENode> getCompositeNodes() { + return this.compositeNodes; + } +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELExtraNode.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELGenerationTimeReducer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELGenerationTimeReducer.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELGenerationTimeReducer.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,62 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.dllearner.core.owl.Individual; + +/** + * Compact two a partial definitions set using Generation Time Greedy strategy + * + * @author An C. 
Tran + * + */ + +public class PADCELGenerationTimeReducer implements PADCELReducer { + + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples) { + return compact(partialDefinitions, positiveExamples, 0); + } + + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples, int uncoveredPositiveExamples) { + Set<Individual> positiveExamplesTmp = new HashSet<Individual>(); + positiveExamplesTmp.addAll(positiveExamples); + + TreeSet<PADCELExtraNode> newSortedPartialDefinitions = new TreeSet<PADCELExtraNode>( + new PADCELDefinitionGenerationTimeComparator()); + + synchronized (partialDefinitions) { + newSortedPartialDefinitions.addAll(partialDefinitions); + } + + TreeSet<PADCELExtraNode> minimisedPartialDefinition = new TreeSet<PADCELExtraNode>( + new PADCELDefinitionGenerationTimeComparator()); + + Iterator<PADCELExtraNode> partialDefinitionIterator = newSortedPartialDefinitions.iterator(); + + while ((positiveExamplesTmp.size() > uncoveredPositiveExamples) + && (partialDefinitionIterator.hasNext())) { + PADCELExtraNode node = partialDefinitionIterator.next(); + + int positiveExamplesRemoved = positiveExamplesTmp.size(); + positiveExamplesTmp.removeAll(node.getCoveredPositiveExamples()); + + positiveExamplesRemoved -= positiveExamplesTmp.size(); + + if (positiveExamplesRemoved > 0) { + node.setCorrectness(positiveExamplesRemoved); + minimisedPartialDefinition.add(node); + } + } + + return minimisedPartialDefinition; + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELGenerationTimeReducer.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELHeuristic.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELHeuristic.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELHeuristic.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,16 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.Comparator; + +/** + * Interface for heuristics used in PADCEL + * + * @author An C. Tran + * + */ +public interface PADCELHeuristic extends Comparator<PADCELNode> { + + public int compare(PADCELNode node1, PADCELNode node2); + + public double getScore(PADCELNode node); +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELHeuristic.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELImprovedCovegareGreedyReducer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELImprovedCovegareGreedyReducer.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELImprovedCovegareGreedyReducer.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,115 @@ +package org.dllearner.algorithms.PADCEL; + +import java.util.HashSet; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.apache.log4j.Logger; +import org.dllearner.core.owl.Individual; + +/** + * This class implements "wise" coverage greedy strategy for compacting the partial definitions In + * this strategy, the partial definitions will be chosen based on their coverage. When a partial + * definition has been chosen, coverage of other partial definition will be recalculated + * + * @author An C. 
Tran + * + */ + +public class PADCELImprovedCovegareGreedyReducer implements PADCELReducer { + + Logger logger = Logger.getLogger(this.getClass()); + + /** + * Compact partial definition with noise allowed + * + * @param partialDefinitions + * Set of partial definitions + * @param positiveExamples + * Set of positive examples (used to check whether partial definition is useful + * @param uncoveredPositiveExamples + * Number of uncovered positive examples allowed + * + * @return Subset of partial definitions that cover all positive examples + */ + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples) { + return this.compact(partialDefinitions, positiveExamples, 0); + } + + /** + * Compact partial definition with noise allowed + * + * @param partialDefinitions + * Set of partial definitions + * @param positiveExamples + * Set of positive examples (used to check whether partial definition is useful + * @param uncoveredPositiveExamples + * Number of uncovered positive examples allowed + * + * @return Subset of partial definitions that cover (positive examples \ uncovered positive + * examples) + */ + @Override + public SortedSet<PADCELExtraNode> compact(SortedSet<PADCELExtraNode> partialDefinitions, + Set<Individual> positiveExamples, int uncoveredPositiveExamples) { + Set<Individual> positiveExamplesTmp = new HashSet<Individual>(); + positiveExamplesTmp.addAll(positiveExamples); + + TreeSet<PADCELExtraNode> reducedPartialDefinition = new TreeSet<PADCELExtraNode>( + new PADCELCompletenessComparator()); + + if (partialDefinitions.size() == 0) + return reducedPartialDefinition; + + synchronized (partialDefinitions) { + Object[] partialDefs = partialDefinitions.toArray(); + + // the highest accurate partial definition + // reducedPartialDefinition.add((PDLLExtraNode)partialDefs[0]); + // positiveExamplesTmp.removeAll(((PDLLExtraNode)partialDefs[0]).getCoveredPositiveExamples()); + + for 
(int i = 0; (positiveExamplesTmp.size() > uncoveredPositiveExamples) + && (i < partialDefinitions.size()); i++) { + + // count the number of different positive examples covered + int counti = 0; + for (Individual indi : ((PADCELExtraNode) partialDefs[i]) + .getCoveredPositiveExamples()) { + if (positiveExamplesTmp.contains(indi)) + counti++; + } // count the number of different covered positive examples by + // i + + for (int j = i + 1; j < partialDefinitions.size(); j++) { + int countj = 0; + + for (Individual indj : ((PADCELExtraNode) partialDefs[j]) + .getCoveredPositiveExamples()) + if (positiveExamplesTmp.contains(indj)) + countj++; + + // TODO: revise this code: Swapping should be done only one + // time at the end + // swap the partial definition so that the "best" partial + // definition will be in the top + if (countj > counti) { + PADCELExtraNode tmp = (PADCELExtraNode) partialDefs[j]; + partialDefs[j] = partialDefs[i]; + partialDefs[i] = tmp; + counti = countj; + } + } + + reducedPartialDefinition.add((PADCELExtraNode) partialDefs[i]); + positiveExamplesTmp.removeAll(((PADCELExtraNode) partialDefs[i]) + .getCoveredPositiveExamples()); + } + } + + return reducedPartialDefinition; + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELImprovedCovegareGreedyReducer.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELNode.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELNode.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELNode.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,113 @@ +package org.dllearner.algorithms.PADCEL; + +import java.text.DecimalFormat; +import java.util.HashSet; +import java.util.Set; + +import 
org.dllearner.algorithms.celoe.OENode; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.Individual; + +/** + * Represents a node in the search tree used in the PDLLeanring<br> + * A node includes description and its corresponding properties such as: correctness, accuracy, + * distance between the description the leaning problem, parent node of the description. It also + * contains a flag which indicates the node is processed or not. + * + * @author An C. Tran + * + */ +public class PADCELNode extends OENode { + + private double correctness = -1.0; + private double completeness = -1.0; + + protected Set<Individual> coveredPositiveExamples = new HashSet<Individual>(); + protected Set<Individual> coveredNegativeExamples = new HashSet<Individual>(); + + private DecimalFormat dfPercent = new DecimalFormat("0.00%"); + + public PADCELNode(OENode parentNode, Description description, double accuracy) { + super(parentNode, description, accuracy); + } + + public PADCELNode(OENode parentNode, Description description, double accuracy, double correctness) { + super(parentNode, description, accuracy); + this.correctness = correctness; + } + + public PADCELNode(OENode parentNode, Description description, double accuracy, + double correctness, double completeness) { + super(parentNode, description, accuracy); + this.correctness = correctness; + this.completeness = completeness; + } + + public PADCELNode(OENode parentNode, Description description) { + super(parentNode, description, 0); + } + + public PADCELNode(OENode parentNode, Description description, + Set<Individual> coveredPositiveExamples, Set<Individual> coveredNegativeExamples) { + super(parentNode, description, 0); + this.coveredPositiveExamples.addAll(coveredPositiveExamples); + this.coveredNegativeExamples.addAll(coveredNegativeExamples); + } + + public void setCorrectness(double cor) { + this.correctness = cor; + } + + public double getCorrectness() { + return this.correctness; + } + + public 
void setCompleteness(double comp) { + this.completeness = comp; + } + + public double getCompleteness() { + return this.completeness; + } + + public void setAccuracy(double acc) { + this.accuracy = acc; + } + + public Set<Individual> getCoveredPositiveExamples() { + return this.coveredPositiveExamples; + } + + public Set<Individual> getCoveredNegativeExamples() { + return this.coveredNegativeExamples; + } + + public void setCoveredPositiveExamples(Set<Individual> coveredPositiveExamples) { + if (coveredPositiveExamples != null) + this.coveredPositiveExamples.addAll(coveredPositiveExamples); + else + this.coveredPositiveExamples.clear(); + } + + public void setCoveredNegativeExamples(Set<Individual> coveredNegativeExamples) { + if (coveredNegativeExamples != null) + this.coveredNegativeExamples.addAll(coveredNegativeExamples); + else + this.coveredNegativeExamples.clear(); + } + + @Override + public String toString() { + String ret = this.getDescription().toString(null, null) + " ["; + ret += "acc:" + dfPercent.format(this.getAccuracy()) + ", "; + ret += "cor:" + dfPercent.format(this.getCorrectness()) + ", "; + ret += "comp:" + dfPercent.format(this.completeness) + "]"; + return ret; + + } + + public void setDescription(Description newDescription) { + this.description = newDescription; + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELNode.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELOntologyUtil.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELOntologyUtil.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELOntologyUtil.java 2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,87 @@ +package org.dllearner.algorithms.PADCEL; + +import 
java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; + +/** + * Some utility functions for ontology manipulation + * + * @author An C. Tran + * + */ + +public class PADCELOntologyUtil { + + /** + * ========================================================================================== + * Get ontology given its file path + * + * @param ontologyFilePath + * + * @return Opened ontology + * @throws OWLOntologyCreationException + */ + public static OWLOntology loadOntology(String ontologyFilePath) + throws OWLOntologyCreationException { + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLOntology ontology; + + String flash = (System.getProperty("os.name").contains("Windows")) ? 
"/" : ""; + + File f = new File(ontologyFilePath); + + if (!ontologyFilePath.contains("file:")) + ontologyFilePath = "file:" + flash + f.getAbsolutePath(); + + ontologyFilePath = ontologyFilePath.replace('\\', '/'); + + ontology = manager.loadOntology(IRI.create(ontologyFilePath)); + + return ontology; + } + + /** + * ============================================================================================ + * Persist the ontology + * + * @param ontology + * Ontology which need to be persisted + * + * @throws OWLOntologyStorageException + */ + public static void persistOntology(OWLOntology ontology) throws OWLOntologyStorageException { + OWLOntologyManager manager = ontology.getOWLOntologyManager(); + manager.saveOntology(ontology); + } + + /** + * ============================================================================================ + * Persist ontology to another ontology + * + * @param ontology + * Ontology contains changes + * @param newFilePath + * Path to the new ontology file + * + * @throws OWLOntologyStorageException + * @throws IOException + */ + public static void persistOntology(OWLOntology ontology, String newFilePath) + throws OWLOntologyStorageException, IOException { + OWLOntologyManager manager = ontology.getOWLOntologyManager(); + + File f = new File(newFilePath); + FileOutputStream fo = new FileOutputStream(f); + + manager.saveOntology(ontology, fo); + fo.close(); + } + +} Property changes on: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELOntologyUtil.java ___________________________________________________________________ Added: svn:mime-type + text/plain Added: trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELPosNegLP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELPosNegLP.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/PADCEL/PADCELPosNegLP.java 
2012-05-13 17:14:12 UTC (rev 3706) @@ -0,0 +1,493 @@ +package org.dllearner.algorithms.PADCEL; + +/** + * PDLL Learning problem: provides correctness, completeness, and accuracy calculation. + * Predictive accuracy calculation is used. + * + * @author An C. Tran + */ + +import java.util.Set; + +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedList; + +import org.apache.log4j.Logger; +import org.dllearner.core.AbstractLearningProblem; +import org.dllearner.core.ComponentAnn; +import org.dllearner.core.ComponentInitException; +import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.EvaluatedDescription; +import org.dllearner.core.options.ConfigOption; +import org.dllearner.core.options.StringSetConfigOption; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.Individual; + +@ComponentAnn(name = "PDLLPosNegLP", shortName = "pdllPosNegLP", version = 0.1, description = "PDLL positive negative learning problem") +public class PADCELPosNegLP extends AbstractLearningProblem { + + protected Set<Individual> positiveExamples; + protected Set<Individual> negativeExamples; + + protected Set<Individual> uncoveredPositiveExamples; + + private Logger logger = Logger.getLogger(this.getClass()); + + // reasoner component is declared in AbstractLearningProblem class + + /** + * Constructor, used in case that positive and negative examples are provided when this + * component is initialized + * + * @param reasoningService + * Reasoner, provides reasoning service. 
Used to checking the instance type + * @param positiveExamples + * Positive examples + * @param negativeExamples + * Negative examples + */ + public PADCELPosNegLP(AbstractReasonerComponent reasoningService, + Set<Individual> positiveExamples, Set<Individual> negativeExamples) { + super(reasoningService); + this.positiveExamples = positiveExamples; + this.negativeExamples = negativeExamples; + this.uncoveredPositiveExamples = this.positiveExamples; + } + + /** + * This constructor is used when the learning configuration file is used + * + * @param reasoningService + */ + public PADCELPosNegLP(AbstractReasonerComponent reasoningService) { + super(reasoningService); + } + + /** + * This constructor can be used by SpringDefinition to create bean object Properties of new bean + * may be initialised later using setters + */ + public PADCELPosNegLP() { + super(); + + if (logger.isDebugEnabled()) + logger.debug("Learning problem created: " + this); + } + + /** + * Get list of positive examples covered by a description + * + * @param description + * Description + * + * @return Set of positive examples covered by the description + */ + protected Set<Individual> coveredPositiveExamples(Description description) { + Set<Individual> coveredPositiveExamples = new HashSet<Individual>(); + + for (Individual example : positiveExamples) + if (reasoner.hasType(description, example)) + coveredPositiveExamples.add(example); + + return coveredPositiveExamples; + } + + /** + * Get number of positive examples covered by a description + * + * @param description + * Description + * @return Number if positive examples covered by the description + */ + protected int getNumberCoveredPositiveExamples(Description description) { + int coveredPos = 0; + + for (Individual example : positiveExamples) + if (reasoner.hasType(description, example)) + coveredPos++; + + return coveredPos; + } + + /** + * Get number of negative examples covered by a description + * + * @param description + * Description 
to test + * + * @return Number of negative examples covered by the description + */ + protected int getNumberOfCoveredNegativeExamples(Description description) { + int coveredNeg = 0; + for (Individual example : negativeExamples) { + if (reasoner.hasType(description, example)) { + coveredNeg++; + } + } + + return coveredNeg; + } + + /** + * Calculate predictive accuracy of a description pred-accuracy(D) = + * (covered-positive-examples(D) + uncovered-negative-examples(D)) / all-examples + * + * @param description + * Description which will ve calculated the accuracy + * + * @return Predictive accuracy of a description + */ + protected double accuracy_cal(Description description) { + // double accuracy = (positiveExamples.size() - notCoveredPos + notCoveredNeg)/ + // (double)(positiveExamples.size() + negativeExamples.size()); + int cp = this.getNumberCoveredPositiveExamples(description); + int un = this.negativeExamples.size() + - this.getNumberOfCoveredNegativeExamples(description); + + return (cp + un) / (double) (positiveExamples.size() + negativeExamples.size()); + } + + /** + * Calculate the correctness of a description + * + * @param description + * Description to calculate + * + * @return Correctness of the description + */ + protected double correctness_cal(Description description) { + int un = this.negativeExamples.size() + - this.getNumberOfCoveredNegativeExamples(description); + return un / (double) this.negativeExamples.size(); + } + + /** + * Calculate the completeness of a description + * + * @param description + * Description to calculate + * + * @return Complete if the description + */ + protected double completeness_cal(Description description) { + int cp = this.getNumberCoveredPositiveExamples(description); + return cp / (double) this.positiveExamples.size(); + } + + /** + * Calculate accuracy and correctness, calculated by:<br> + * correctness(D) = not-covered-examples(D) / all-negative-examples<br> + * completeness(D) = covered-positive-examples 
/ all-positive-examples<br> + * accuracy(D) = [covered-positive-examples(D) + not-covered-negative-examples(D)] / + * all-examples<br> + * Noise has not been supported in the current version + * + * + * @param description + * Description to be calculated accuracy and correctness + * + * @return A PDLLEvaluationResult object. If the description is weak, its accuracy will be -1 + * + * NOTE: do we need "weak" concept with the value of -1? How if we just simply assign 0 + * for it? + */ + public PADCELEvaluationResult getAccuracyAndCorrectness(Description description) { + + int notCoveredPos = 0; + int notCoveredNeg = 0; + Set<Individual> coveredPositiveExamples = new HashSet<Individual>(); + + for (Individual example : positiveExamples) { + if (!reasoner.hasType(description, example)) + notCoveredPos++; + else + coveredPositiveExamples.add(example); + } + + if (coveredPositiveExamples.size() > 0) { + + notCoveredNeg = negativeExamples.size() + - getNumberOfCoveredNegativeExamples(description); + + double correctness = (double) notCoveredNeg / (double) negativeExamples.size(); + double completeness = (double) coveredPositiveExamples.size() / positiveExamples.size(); + + if (correctness < 1.0d) + coveredPositiveExamples = null; + + double accuracy = (positiveExamples.size() - notCoveredPos + notCoveredNeg) + / (double) (positiveExamples.size() + negativeExamples.size()); + + // accuracy = (covered positive examples + not covered negative examples) / all examples + // (completeness + correctness) + return new PADCELEvaluationResult(accuracy, correctness, completeness, + coveredPositiveExamples); + + } else { + // a node will be considered as "weak" if it covers none of the positive example and + // the accuracy will be assigned -1 + return new PADCELEvaluationResult(-1, notCoveredNeg / (double) negativeExamples.size(), 0); + } + + } + + /** + * In this accuracy calculation, the accuracy value is based on the current uncovered positive + * examples but the covered 
positive examples returned still takes all positive examples into + * account + * + * @param description + * Description to be calculated + * @return + */ + public PADCELEvaluationResult getAccuracyAndCorrectness2(Description descript... [truncated message content] |
From: <lor...@us...> - 2012-05-11 07:50:35
|
Revision: 3705 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3705&view=rev Author: lorenz_b Date: 2012-05-11 07:50:26 +0000 (Fri, 11 May 2012) Log Message: ----------- Added option to compute CBD against local JENA model. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2012-05-11 07:40:34 UTC (rev 3704) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2012-05-11 07:50:26 UTC (rev 3705) @@ -8,6 +8,7 @@ import org.apache.log4j.Logger; import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; @@ -18,10 +19,11 @@ private static final Logger logger = Logger.getLogger(ConciseBoundedDescriptionGeneratorImpl.class); private static final int CHUNK_SIZE = 1000; - private static final int DEFAULT_DEPTH = 1; + private static final int DEFAULT_DEPTH = 2; private ExtractionDBCache cache; private SparqlEndpoint endpoint; + private Model baseModel; public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint, ExtractionDBCache cache) { this.endpoint = endpoint; @@ -32,6 +34,10 @@ this(endpoint, null); } + public ConciseBoundedDescriptionGeneratorImpl(Model model) { + this.baseModel = model; + } + public Model getConciseBoundedDescription(String resourceURI){ return getConciseBoundedDescription(resourceURI, DEFAULT_DEPTH); } @@ -68,6 +74,7 @@ } catch (SQLException e) { logger.error(e); } + return all; } @@ -80,19 +87,19 @@ StringBuilder sb = new StringBuilder(); 
sb.append("CONSTRUCT {\n"); sb.append("<").append(resource).append("> ").append("?p0 ").append("?o0").append(".\n"); - sb.append("?p0 a ?type0.\n"); +// sb.append("?p0 a ?type0.\n"); for(int i = 1; i < depth; i++){ sb.append("?o").append(i-1).append(" ").append("?p").append(i).append(" ").append("?o").append(i).append(".\n"); - sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); +// sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); } sb.append("}\n"); sb.append("WHERE {\n"); sb.append("<").append(resource).append("> ").append("?p0 ").append("?o0").append(".\n"); - sb.append("?p0 a ?type0.\n"); +// sb.append("?p0 a ?type0.\n"); for(int i = 1; i < depth; i++){ sb.append("OPTIONAL{\n"); sb.append("?o").append(i-1).append(" ").append("?p").append(i).append(" ").append("?o").append(i).append(".\n"); - sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); +// sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); } for(int i = 1; i < depth; i++){ sb.append("}"); @@ -110,18 +117,23 @@ } Model model; - if(cache == null){ - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); - for (String dgu : endpoint.getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); + if(baseModel == null){ + if(cache == null){ + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); + for (String dgu : endpoint.getDefaultGraphURIs()) { + queryExecution.addDefaultGraph(dgu); + } + for (String ngu : endpoint.getNamedGraphURIs()) { + queryExecution.addNamedGraph(ngu); + } + model = queryExecution.execConstruct(); + } else { + model = cache.executeConstructQuery(endpoint, query); } - for (String ngu : endpoint.getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } - model = queryExecution.execConstruct(); } else { 
- model = cache.executeConstructQuery(endpoint, query); + model = QueryExecutionFactory.create(query, baseModel).execConstruct(); } + if(logger.isDebugEnabled()){ logger.debug("Got " + model.size() + " new triples in."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2012-05-11 07:40:45
|
Revision: 3704 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3704&view=rev Author: jenslehmann Date: 2012-05-11 07:40:34 +0000 (Fri, 11 May 2012) Log Message: ----------- moved class Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java trunk/interfaces/src/main/resources/log4j.properties Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ClassIndexer.java Removed Paths: ------------- trunk/components-core/src/main/java/org/nlp2rdf/ Copied: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ClassIndexer.java (from rev 3703, trunk/components-core/src/main/java/org/nlp2rdf/ontology/ClassIndexer.java) =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ClassIndexer.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ClassIndexer.java 2012-05-11 07:40:34 UTC (rev 3704) @@ -0,0 +1,258 @@ +/***************************************************************************/ +/* Copyright (C) 2010-2011, Sebastian Hellmann */ +/* Note: If you need parts of NLP2RDF in another licence due to licence */ +/* incompatibility, please mail hel...@in... */ +/* */ +/* This file is part of NLP2RDF. */ +/* */ +/* NLP2RDF is free software; you can redistribute it and/or modify */ +/* it under the terms of the GNU General Public License as published by */ +/* the Free Software Foundation; either version 3 of the License, or */ +/* (at your option) any later version. */ +/* */ +/* NLP2RDF is distributed in the hope that it will be useful, */ +/* but WITHOUT ANY WARRANTY; without even the implied warranty of */ +/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */ +/* GNU General Public License for more details. */ +/* */ +/* You should have received a copy of the GNU General Public License */ +/* along with this program. 
If not, see <http://www.gnu.org/licenses/>. */ +/***************************************************************************/ + +package org.dllearner.kb.sparql.simple; + +import com.hp.hpl.jena.ontology.OntClass; +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.ontology.OntModelSpec; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.util.iterator.ExtendedIterator; +import com.jamonapi.Monitor; +import com.jamonapi.MonitorFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.naming.ldap.ExtendedRequest; +import java.util.*; + + +/** + * Indexes an Ontology + * skips complex classes per default, this does not affect the hierarchy outcome + */ +public class ClassIndexer { + private static Logger log = LoggerFactory.getLogger(ClassIndexer.class); + + //Options + private boolean copyLabels = true; + private boolean copyComments = true; + private String language = null; + + //Not implemented + private Map<String, String> transform = new HashMap<String, String>(); + //Not implemented + private Set<String> remove = new HashSet<String>(); + + //internal variables + private Map<String, OntModel> classUriToClassHierarchy = new HashMap<String, OntModel>(); + + public ClassIndexer() { + } + + public void index(OntModel from) { + + // Set<OntClass> classes = from.listClasses(); + int i = 0; + OntClass cl; + for (ExtendedIterator<OntClass> it = from.listClasses(); it.hasNext(); ) { + Monitor m0 = MonitorFactory.start("Indexer listClasses"); + cl = it.next(); + m0.stop(); + //for (OntClass cl : classes) { + Monitor m1 = MonitorFactory.start("Indexer generating tree"); + Tree t = new Tree(cl); + m1.stop(); + Monitor m2 = MonitorFactory.start("Indexer generating model"); + OntModel m = t.toModel(); + m2.stop(); + Monitor m3 = MonitorFactory.start("Indexer generating hashmap"); + classUriToClassHierarchy.put(cl.getURI(), m); + m3.stop(); + } + + } + + /** + * @param classUri + * @return a filled OntModel with all 
superclasses of classUri or null, if no class is found + */ + public OntModel getHierarchyForClassURI(String classUri) { + return classUriToClassHierarchy.get(classUri); + } + + /** + * transforms namespaces + * + * @param in + * @return + */ + private String transformNamespace(String in) { + String ret = in; + for (String s : transform.keySet()) { + if (in.startsWith(s)) { + return in.replace(s, transform.get(s)); + + } + } + return ret; + } + + /** + * filters out certain namespaces + * + * @param s + * @return + */ + private boolean filterNamespace(String s) { + for (String prefix : remove) { + if (s.startsWith(prefix)) { + return true; + } + } + return false; + } + + + public boolean isCopyLabels() { + return copyLabels; + } + + public void setCopyLabels(boolean copyLabels) { + this.copyLabels = copyLabels; + } + + public boolean isCopyComments() { + return copyComments; + } + + public void setCopyComments(boolean copyComments) { + this.copyComments = copyComments; + } + + public String getLanguage() { + return language; + } + + public void setLanguage(String language) { + this.language = language; + } + + /** + * A simple Helper Class to convert the hierarchy + */ + private class Tree { + final String uri; + List<Tree> parents; + final String label; + final String comment; + + public Tree(OntClass me) { + this.uri = me.getURI(); + label = me.getLabel(language); + comment = me.getComment(language); + parents = new ArrayList<Tree>(); + + Set<OntClass> superClasses = me.listSuperClasses(true).toSet(); + for (OntClass s : superClasses) { + //this is were complex classes are skipped + if (s.isAnon()) { + continue; + } + log.trace(s.toString()); + parents.add(new Tree(s)); + } + } + + public OntModel toModel() { + OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM, ModelFactory.createDefaultModel()); + OntClass me = model.createClass(uri); + //TODO test this for <&> + if (copyLabels && label != null) { + me.addLabel(label, language); + } + if 
(copyComments && comment != null) { + me.addComment(comment, language); + } + for (Tree p : parents) { + OntClass superClass = model.createClass(p.uri); + me.addSuperClass(superClass); + model.add(p.toModel()); + } + return model; + } + } + +} + +/** + public void expandSuperAndCopy(String originalClassUri) { + + String newClassUri = transform(originalClassUri); + if (isRemove(originalClassUri) || isRemove(newClassUri)) { + return; + } + + + // create initial classes + OntClass toClass = toModel.createClass(newClassUri); + OntClass fromClass = fromModel.getOntClass(originalClassUri); + + if(toClass==null || fromClass == null){ + logger.error("null occured in fromClass "+originalClassUri+" but retrieving yielded: "+fromClass ); + return; + } + + //System.out.println("begin"); + //for(OntClass cltest: fromModel.listClasses().toSet()){ + // System.out.println(cltest.getURI()); + // System.out.println(cltest.getClass().getSimpleName()); + //} + //System.out.println("end"); + + if (copyLabelsAndComments ) { + String tmp = null; + + if((tmp=fromClass.getLabel(null))!=null) {toClass.setLabel(tmp, null);} + // System.out.println(fromClass.getURI()+"has label "+tmp); + + if((tmp=fromClass.getComment(null))!=null) {toClass.setComment(tmp, null);} + // System.out.println(fromClass.getURI()+"has comment "+tmp); + } + + // get the superclasses + Set<OntClass> fromSuperclasses = fromClass.listSuperClasses(true).toSet(); + + for (OntClass fromSuperclass : fromSuperclasses) { + String newFromSuperclassUri = transform(fromSuperclass.getURI()); + if (isRemove(fromSuperclass.getURI()) || isRemove(newFromSuperclassUri)) { + continue; + } + if(fromSuperclass.isAnon()){ + continue; + } + + OntClass toSuperclass = toModel.createClass(newFromSuperclassUri); + toClass.addSuperClass(toSuperclass); + + if (copyLabelsAndComments) { + String tmp = null; + if((tmp=fromSuperclass.getLabel(null))!=null) {toSuperclass.setLabel(tmp, null);} + // System.out.println(fromSuperclass.getURI()+"has label 
"+tmp); + + if((tmp=fromSuperclass.getComment(null))!=null) {toSuperclass.setComment(tmp, null);} + // System.out.println(fromSuperclass.getURI()+"has comment "+tmp); + } + // System.out.println(fromSuperclass); + expandSuperAndCopy(fromSuperclass.getURI()); + } + + } **/ \ No newline at end of file Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java 2012-05-11 07:03:50 UTC (rev 3703) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java 2012-05-11 07:40:34 UTC (rev 3704) @@ -10,7 +10,6 @@ import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; -import org.nlp2rdf.ontology.ClassIndexer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; Modified: trunk/interfaces/src/main/resources/log4j.properties =================================================================== --- trunk/interfaces/src/main/resources/log4j.properties 2012-05-11 07:03:50 UTC (rev 3703) +++ trunk/interfaces/src/main/resources/log4j.properties 2012-05-11 07:40:34 UTC (rev 3704) @@ -13,9 +13,9 @@ log4j.appender.file.Threshold=DEBUG # DL-Learner Logs -log4j.logger.org.dllearner=INFO +log4j.logger.org.dllearner=DEBUG # Turn this to Debug if you wish to dump stack traces to the appenders (console, file) -log4j.logger.org.dllearner.cli=INFO +log4j.logger.org.dllearner.cli=DEBUG log4j.logger.org.dllearner.server.nke.LogicalRelationStrategy=DEBUG log4j.category.org.dllearner.kb.simple=DEBUG This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-11 07:03:57
|
Revision: 3703 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3703&view=rev Author: lorenz_b Date: 2012-05-11 07:03:50 +0000 (Fri, 11 May 2012) Log Message: ----------- Small bugfix. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2012-05-11 05:49:23 UTC (rev 3702) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2012-05-11 07:03:50 UTC (rev 3703) @@ -190,14 +190,14 @@ ObjectPropertyDomainAxiomLearner l = new ObjectPropertyDomainAxiomLearner(ks); l.setReasoner(reasoner); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/currency")); - l.setMaxExecutionTimeInSeconds(10); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/creator")); + l.setMaxExecutionTimeInSeconds(40); l.addFilterNamespace("http://dbpedia.org/ontology/"); // l.setReturnOnlyNewAxioms(true); l.init(); l.start(); - System.out.println(l.getCurrentlyBestEvaluatedAxioms(10, 0.75)); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(10, 0.3)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java 2012-05-11 05:49:23 UTC (rev 3702) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java 
2012-05-11 07:03:50 UTC (rev 3703) @@ -44,6 +44,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.hp.hpl.jena.query.ParameterizedSparqlString; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; @@ -58,6 +59,7 @@ public ObjectPropertyRangeAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; + super.iterativeQueryTemplate = new ParameterizedSparqlString("SELECT DISTINCT ?ind ?type WHERE {?s ?p ?ind. ?ind a ?type.}"); } public ObjectProperty getPropertyToDescribe() { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tc...@us...> - 2012-05-11 05:49:31
|
Revision: 3702 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3702&view=rev Author: tcanvn Date: 2012-05-11 05:49:23 +0000 (Fri, 11 May 2012) Log Message: ----------- Changes: 1. add Apache Common Pool library into the build path (pom) 2. change visibility of some of the properties on OENode from private to protected 3. add RhoDRDown used in Dl-Learner version 2008 back to refinement operators package (for ParCEL) 4. modify KBFile.class to fix the bug that causes file-separate redundancy when loading a KB file by a relative path (in Windows) Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown2008.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2012-05-10 13:31:25 UTC (rev 3701) +++ trunk/components-core/pom.xml 2012-05-11 05:49:23 UTC (rev 3702) @@ -211,7 +211,6 @@ <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> </dependency> - <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> @@ -229,7 +228,18 @@ <groupId>commons-lang</groupId> <artifactId>commons-lang</artifactId> </dependency> - - - </dependencies> + <dependency> + <groupId>commons-pool</groupId> + <artifactId>commons-pool</artifactId> + </dependency> + </dependencies> + <dependencyManagement> + <dependencies> + <dependency> + <groupId>commons-pool</groupId> + <artifactId>commons-pool</artifactId> + <version>1.6</version> + </dependency> + </dependencies> + </dependencyManagement> </project> Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java 2012-05-10 13:31:25 UTC (rev 3701) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java 2012-05-11 05:49:23 UTC (rev 3702) @@ -43,14 +43,14 @@ */ public class OENode implements SearchTreeNode { - private Description description; + protected Description description; - private double accuracy; + protected double accuracy; - private int horizontalExpansion; + protected int horizontalExpansion; - private OENode parent; - private List<OENode> children = new LinkedList<OENode>(); + protected OENode parent; + protected List<OENode> children = new LinkedList<OENode>(); // the refinement count corresponds to the number of refinements of the // description in this node - it is a better heuristic indicator than child count Modified: trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java 2012-05-10 13:31:25 UTC (rev 3701) +++ trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java 2012-05-11 05:49:23 UTC (rev 3702) @@ -37,7 +37,6 @@ import org.dllearner.parser.ParseException; import org.dllearner.reasoning.DIGConverter; import org.dllearner.utilities.owl.OWLAPIAxiomConvertVisitor; -import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; @@ -106,7 +105,16 @@ /** Leave it as is */ kb = KBParser.parseKBFile(getUrl()); } else { - File f = new File(new URI(baseDir + File.separator + getUrl())); + + //this check is for eliminating the redundancy + //if the baseDir has separator at the end, do not add one more between baseDir and KB filename (or url) + String fullFilepath; + if (baseDir.endsWith("\\") || baseDir.endsWith("/")) + fullFilepath = baseDir + getUrl(); + else + 
fullFilepath = baseDir + File.separator + getUrl(); + + File f = new File(new URI(fullFilepath)); setUrl(f.toURI().toString()); kb = KBParser.parseKBFile(f); } Added: trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown2008.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown2008.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown2008.java 2012-05-11 05:49:23 UTC (rev 3702) @@ -0,0 +1,1520 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.refinementoperators; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.Map.Entry; + +import org.apache.log4j.Logger; +import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.ComponentAnn; +import org.dllearner.core.options.CommonConfigOptions; +import org.dllearner.core.owl.BooleanValueRestriction; +import org.dllearner.core.owl.ClassHierarchy; +import org.dllearner.core.owl.Constant; +import org.dllearner.core.owl.DataRange; +import org.dllearner.core.owl.DatatypeProperty; +import org.dllearner.core.owl.DatatypeSomeRestriction; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.DoubleMaxValue; +import org.dllearner.core.owl.DoubleMinValue; +import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.Intersection; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.core.owl.Negation; +import org.dllearner.core.owl.Nothing; +import org.dllearner.core.owl.ObjectAllRestriction; +import org.dllearner.core.owl.ObjectCardinalityRestriction; +import org.dllearner.core.owl.ObjectMaxCardinalityRestriction; +import org.dllearner.core.owl.ObjectMinCardinalityRestriction; +import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.core.owl.ObjectPropertyExpression; +import org.dllearner.core.owl.ObjectQuantorRestriction; +import org.dllearner.core.owl.ObjectSomeRestriction; +import org.dllearner.core.owl.ObjectValueRestriction; +import org.dllearner.core.owl.StringValueRestriction; +import org.dllearner.core.owl.Thing; +import org.dllearner.core.owl.Union; +import org.dllearner.utilities.Helper; +import org.dllearner.utilities.owl.ConceptComparator; +import 
org.dllearner.utilities.owl.ConceptTransformation; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * A downward refinement operator, which makes use of domains + * and ranges of properties. The operator is currently under + * development. Its aim is to span a much "cleaner" and smaller search + * tree compared to RhoDown by omitting many class descriptions, + * which are obviously too weak, because they violate + * domain/range restrictions. Furthermore, it makes use of disjoint + * classes in the knowledge base. + * + * TODO Some of the code has moved to {@link Utility} in a modified + * form to make it accessible for implementations of other refinement + * operators. These utility methods may be completed and carefully + * integrated back later. + * + * @author Jens Lehmann + * + */ + +@ComponentAnn(name = "rho refinement operator 2008", shortName = "rho2008", version = 0.08) +public class RhoDRDown2008 extends RefinementOperatorAdapter { + + private static Logger logger = Logger.getLogger(RhoDRDown2008.class); + + private AbstractReasonerComponent rs; + + // hierarchies + private ClassHierarchy subHierarchy; + + // domains and ranges + private Map<ObjectProperty,Description> opDomains = new TreeMap<ObjectProperty,Description>(); + private Map<DatatypeProperty,Description> dpDomains = new TreeMap<DatatypeProperty,Description>(); + private Map<ObjectProperty,Description> opRanges = new TreeMap<ObjectProperty,Description>(); + + // maximum number of fillers for eeach role + private Map<ObjectProperty,Integer> maxNrOfFillers = new TreeMap<ObjectProperty,Integer>(); + // limit for cardinality restrictions (this makes sense if we e.g. have compounds with up to + // more than 200 atoms but we are only interested in atoms with certain characteristics and do + // not want something like e.g. 
>= 204 hasAtom.NOT Carbon-87; which blows up the search space + private int cardinalityLimit = 5; + + // start concept (can be used to start from an arbitrary concept, needs + // to be Thing or NamedClass), note that when you use e.g. Compound as + // start class, then the algorithm should start the search with class + // Compound (and not with Thing), because otherwise concepts like + // NOT Carbon-87 will be returned which itself is not a subclass of Compound + private Description startClass = new Thing(); + + // the length of concepts of top refinements, the first values is + // for refinements of \rho_\top(\top), the second one for \rho_A(\top) + private int topRefinementsLength = 0; + private Map<NamedClass, Integer> topARefinementsLength = new TreeMap<NamedClass, Integer>(); + // M is finite and this value is the maximum length of any value in M + private static int mMaxLength = 4; + + // the sets M_\top and M_A + private Map<Integer,SortedSet<Description>> m = new TreeMap<Integer,SortedSet<Description>>(); + private Map<NamedClass,Map<Integer,SortedSet<Description>>> mA = new TreeMap<NamedClass,Map<Integer,SortedSet<Description>>>(); + + // @see MathOperations.getCombos + private Map<Integer, List<List<Integer>>> combos = new HashMap<Integer, List<List<Integer>>>(); + + // refinements of the top concept ordered by length + private Map<Integer, SortedSet<Description>> topRefinements = new TreeMap<Integer, SortedSet<Description>>(); + private Map<NamedClass,Map<Integer, SortedSet<Description>>> topARefinements = new TreeMap<NamedClass,Map<Integer, SortedSet<Description>>>(); + + // cumulated refinements of top (all from length one to the specified length) + private Map<Integer, TreeSet<Description>> topRefinementsCumulative = new HashMap<Integer, TreeSet<Description>>(); + private Map<NamedClass,Map<Integer, TreeSet<Description>>> topARefinementsCumulative = new TreeMap<NamedClass,Map<Integer, TreeSet<Description>>>(); + + // app_A set of applicable properties 
for a given class (separate for + // object properties, boolean datatypes, and double datatypes) + private Map<NamedClass, Set<ObjectProperty>> appOP = new TreeMap<NamedClass, Set<ObjectProperty>>(); + private Map<NamedClass, Set<DatatypeProperty>> appBD = new TreeMap<NamedClass, Set<DatatypeProperty>>(); + private Map<NamedClass, Set<DatatypeProperty>> appDD = new TreeMap<NamedClass, Set<DatatypeProperty>>(); + + // most general applicable properties + private Map<NamedClass,Set<ObjectProperty>> mgr = new TreeMap<NamedClass,Set<ObjectProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgbd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgdd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + private Map<NamedClass,Set<DatatypeProperty>> mgsd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); + + // concept comparator + private ConceptComparator conceptComparator = new ConceptComparator(); + + // splits for double datatype properties in ascening order + private Map<DatatypeProperty,List<Double>> splits = null; //new TreeMap<DatatypeProperty,List<Double>>(); + + /** + * how many splits will be made for double data properties + */ + private int maxNrOfSplits = 10; + + // data structure for a simple frequent pattern matching preprocessing phase + private int frequencyThreshold = CommonConfigOptions.valueFrequencyThresholdDefault; + private Map<ObjectProperty, Map<Individual, Integer>> valueFrequency = new HashMap<ObjectProperty, Map<Individual, Integer>>(); + // data structure with identified frequent values + private Map<ObjectProperty, Set<Individual>> frequentValues = new HashMap<ObjectProperty, Set<Individual>>(); + // frequent data values + private Map<DatatypeProperty, Set<Constant>> frequentDataValues = new HashMap<DatatypeProperty, Set<Constant>>(); + private Map<DatatypeProperty, Map<Constant, Integer>> dataValueFrequency = new HashMap<DatatypeProperty, Map<Constant, Integer>>(); + private boolean 
useDataHasValueConstructor = false; + + // staistics + public long mComputationTimeNs = 0; + public long topComputationTimeNs = 0; + + private boolean applyAllFilter = true; + private boolean applyExistsFilter = true; + private boolean useAllConstructor = true; + private boolean useExistsConstructor = true; + private boolean useHasValueConstructor = false; + private boolean useCardinalityRestrictions = true; + private boolean useNegation = true; + private boolean useBooleanDatatypes = true; + private boolean useDoubleDatatypes = true; + @SuppressWarnings("unused") + private boolean useStringDatatypes = false; + private boolean disjointChecks = true; + private boolean instanceBasedDisjoints = true; + + private boolean dropDisjuncts = false; + + private boolean isDisjunctionAllowed = true; + + // caches for reasoner queries + private Map<Description,Map<Description,Boolean>> cachedDisjoints = new TreeMap<Description,Map<Description,Boolean>>(conceptComparator); + + + public RhoDRDown2008(AbstractReasonerComponent reasoningService) { + this.rs = reasoningService; + this.subHierarchy = rs.getClassHierarchy(); + } + + public RhoDRDown2008(AbstractReasonerComponent reasoner, ClassHierarchy subHierarchy, Description startClass) { + this.rs = reasoner; + this.subHierarchy = subHierarchy; + this.startClass = startClass; + } + + public void init() { + // query reasoner for domains and ranges + // (because they are used often in the operator) + + if (logger.isDebugEnabled()) + logger.debug("[Refinement operator] " + this.getClass().getSimpleName() + " created (splits: " + (splits!=null?"using Splitter":"fix split="+maxNrOfSplits) + ")"); //actran + + + for(ObjectProperty op : rs.getObjectProperties()) { + opDomains.put(op, rs.getDomain(op)); + opRanges.put(op, rs.getRange(op)); + + if(useHasValueConstructor) { + // init + Map<Individual, Integer> opMap = new TreeMap<Individual, Integer>(); + valueFrequency.put(op, opMap); + + // sets ordered by corresponding individual (which 
we ignore) + Collection<SortedSet<Individual>> fillerSets = rs.getPropertyMembers(op).values(); + for(SortedSet<Individual> fillerSet : fillerSets) { + for(Individual i : fillerSet) { + // System.out.println("op " + op + " i " + i); + Integer value = opMap.get(i); + + if(value != null) { + opMap.put(i, value+1); + } else { + opMap.put(i, 1); + } + } + } + + // keep only frequent patterns + Set<Individual> frequentInds = new TreeSet<Individual>(); + for(Individual i : opMap.keySet()) { + if(opMap.get(i) >= frequencyThreshold) { + frequentInds.add(i); + // break; + } + } + frequentValues.put(op, frequentInds); + + } + + } + + //logger.trace("[rho] Init ObjectProperty: " + opDomains); + //logger.trace("[rho] Init ObjectProperty: " + opRanges); + + for(DatatypeProperty dp : rs.getDatatypeProperties()) { + dpDomains.put(dp, rs.getDomain(dp)); + + if(useDataHasValueConstructor) { + Map<Constant, Integer> dpMap = new TreeMap<Constant, Integer>(); + dataValueFrequency.put(dp, dpMap); + + // sets ordered by corresponding individual (which we ignore) + Collection<SortedSet<Constant>> fillerSets = rs.getDatatypeMembers(dp).values(); + for(SortedSet<Constant> fillerSet : fillerSets) { + for(Constant i : fillerSet) { + // System.out.println("op " + op + " i " + i); + Integer value = dpMap.get(i); + + if(value != null) { + dpMap.put(i, value+1); + } else { + dpMap.put(i, 1); + } + } + } + + // keep only frequent patterns + Set<Constant> frequentInds = new TreeSet<Constant>(); + for(Constant i : dpMap.keySet()) { + if(dpMap.get(i) >= frequencyThreshold) { + logger.trace("adding value "+i+", because "+dpMap.get(i) +">="+frequencyThreshold); + frequentInds.add(i); + } + } + frequentDataValues.put(dp, frequentInds); + } + } + + // we do not need the temporary set anymore and let the + // garbage collector take care of it + valueFrequency = null; + dataValueFrequency = null; + + + // compute splits for double datatype properties + if (this.splits == null) { + splits = new 
TreeMap<DatatypeProperty,List<Double>>(); + for(DatatypeProperty dp : rs.getDoubleDatatypeProperties()) + computeSplits(dp); + + if (logger.isDebugEnabled()) { + logger.debug("[RhoDRDown2008] Uses original split strategy: " + splits); + } + } + + + + // determine the maximum number of fillers for each role + // (up to a specified cardinality maximum) + if(useCardinalityRestrictions) { + for(ObjectProperty op : rs.getObjectProperties()) { + int maxFillers = 0; + Map<Individual,SortedSet<Individual>> opMembers = rs.getPropertyMembers(op); + for(SortedSet<Individual> inds : opMembers.values()) { + if(inds.size()>maxFillers) + maxFillers = inds.size(); + if(maxFillers >= cardinalityLimit) { + maxFillers = cardinalityLimit; + break; + } + } + maxNrOfFillers.put(op, maxFillers); + } + } + + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.refinement.RefinementOperator#refine(org.dllearner.core.owl.Description) + */ + @Override + public Set<Description> refine(Description concept) { + throw new RuntimeException(); + } + + @Override + public Set<Description> refine(Description description, int maxLength) { + // check that maxLength is valid + if(maxLength < description.getLength()) { + throw new Error("length has to be at least description length (description: " + description + ", max length: " + maxLength + ")"); + } + return refine(description, maxLength, null, startClass); + } + + /* (non-Javadoc) + * @see org.dllearner.algorithms.refinement.RefinementOperator#refine(org.dllearner.core.owl.Description, int, java.util.List) + */ + @Override + public Set<Description> refine(Description description, int maxLength, + List<Description> knownRefinements) { + return refine(description, maxLength, knownRefinements, startClass); + } + + @SuppressWarnings({"unchecked"}) + public Set<Description> refine(Description description, int maxLength, + List<Description> knownRefinements, Description currDomain) { + + // System.out.println("|- " + description + " " + currDomain + " 
" + maxLength); + + //logger.trace(" [rho-refine] Refine called for: " + description); + //logger.trace(" \t\tmaxLength: " + maxLength + "; knownRefinrements: " + knownRefinements +"; currDomain: " + currDomain); + + + // actions needing to be performed if this is the first time the + // current domain is used + if(!(currDomain instanceof Thing) && !topARefinementsLength.containsKey(currDomain)) + topARefinementsLength.put((NamedClass)currDomain, 0); + + // check whether using list or set makes more sense + // here; and whether HashSet or TreeSet should be used + // => TreeSet because duplicates are possible + Set<Description> refinements = new TreeSet<Description>(conceptComparator); + + // used as temporary variable + Set<Description> tmp = new HashSet<Description>(); + + if(description instanceof Thing) { + // extends top refinements if necessary + if(currDomain instanceof Thing) { + if(maxLength>topRefinementsLength) + computeTopRefinements(maxLength); + refinements = (TreeSet<Description>) topRefinementsCumulative.get(maxLength).clone(); + } else { + if(maxLength>topARefinementsLength.get(currDomain)) { + computeTopRefinements(maxLength, (NamedClass) currDomain); + } + refinements = (TreeSet<Description>) topARefinementsCumulative.get(currDomain).get(maxLength).clone(); + } + // refinements.addAll(subHierarchy.getMoreSpecialConcepts(description)); + } else if(description instanceof Nothing) { + // cannot be further refined + } else if(description instanceof NamedClass) { + refinements.addAll(subHierarchy.getSubClasses(description)); + refinements.remove(new Nothing()); + } else if (description instanceof Negation && description.getChild(0) instanceof NamedClass) { + + tmp = subHierarchy.getSuperClasses(description.getChild(0)); + + for(Description c : tmp) { + if(!(c instanceof Thing)) + refinements.add(new Negation(c)); + } + } else if (description instanceof Intersection) { + + // refine one of the elements + for(Description child : 
description.getChildren()) { + + // refine the child; the new max length is the current max length minus + // the currently considered concept plus the length of the child + // TODO: add better explanation + tmp = refine(child, maxLength - description.getLength()+child.getLength(),null,currDomain); + + // create new intersection + for(Description c : tmp) { + List<Description> newChildren = (List<Description>)((LinkedList<Description>)description.getChildren()).clone(); + newChildren.add(c); + newChildren.remove(child); + Intersection mc = new Intersection(newChildren); + + // clean concept and transform it to ordered negation normal form + // (non-recursive variant because only depth 1 was modified) + ConceptTransformation.cleanConceptNonRecursive(mc); + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(mc, conceptComparator); + + // check whether the intersection is OK (sanity checks), then add it + if(checkIntersection(mc)) + refinements.add(mc); + } + + } + + } else if (description instanceof Union) { + // refine one of the elements + for(Description child : description.getChildren()) { + + // System.out.println("union child: " + child + " " + maxLength + " " + description.getLength() + " " + child.getLength()); + + // refine child + tmp = refine(child, maxLength - description.getLength()+child.getLength(),null,currDomain); + + // construct intersection (see above) + for(Description c : tmp) { + List<Description> newChildren = new LinkedList<Description>(description.getChildren()); + newChildren.remove(child); + newChildren.add(c); + Union md = new Union(newChildren); + + // transform to ordered negation normal form + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(md, conceptComparator); + // note that we do not have to call clean here because a disjunction will + // never be nested in another disjunction in this operator + + refinements.add(md); + } + + } + + // if enabled, we can remove elements of the disjunction 
+ if(dropDisjuncts) { + // A1 OR A2 => {A1,A2} + if(description.getChildren().size() == 2) { + refinements.add(description.getChild(0)); + refinements.add(description.getChild(1)); + } else { + // copy children list and remove a different element in each turn + for(int i=0; i<description.getChildren().size(); i++) { + List<Description> newChildren = new LinkedList<Description>(description.getChildren()); + newChildren.remove(i); + Union md = new Union(newChildren); + refinements.add(md); + } + } + } + + } else if (description instanceof ObjectSomeRestriction) { + ObjectPropertyExpression role = ((ObjectQuantorRestriction)description).getRole(); + Description range = opRanges.get(role); + + // rule 1: EXISTS r.D => EXISTS r.E + tmp = refine(description.getChild(0), maxLength-2, null, range); + + for(Description c : tmp) + refinements.add(new ObjectSomeRestriction(((ObjectQuantorRestriction)description).getRole(),c)); + + // rule 2: EXISTS r.D => EXISTS s.D or EXISTS r^-1.D => EXISTS s^-1.D + // currently inverse roles are not supported + ObjectProperty ar = (ObjectProperty) role; + Set<ObjectProperty> moreSpecialRoles = rs.getSubProperties(ar); + for(ObjectProperty moreSpecialRole : moreSpecialRoles) + refinements.add(new ObjectSomeRestriction(moreSpecialRole, description.getChild(0))); + + // rule 3: EXISTS r.D => >= 2 r.D + // (length increases by 1 so we have to check whether max length is sufficient) + if(useCardinalityRestrictions) { + if(maxLength > description.getLength() && maxNrOfFillers.get(ar)>1) { + ObjectMinCardinalityRestriction min = new ObjectMinCardinalityRestriction(2,role,description.getChild(0)); + refinements.add(min); + } + } + + // rule 4: EXISTS r.TOP => EXISTS r.{value} + if(useHasValueConstructor && description.getChild(0) instanceof Thing) { + // watch out for frequent patterns + Set<Individual> frequentInds = frequentValues.get(role); + if(frequentInds != null) { + for(Individual ind : frequentInds) { + ObjectValueRestriction ovr = new 
ObjectValueRestriction((ObjectProperty)role, ind); + refinements.add(ovr); + } + } + } + + } else if (description instanceof ObjectAllRestriction) { + ObjectPropertyExpression role = ((ObjectQuantorRestriction)description).getRole(); + Description range = opRanges.get(role); + + // rule 1: ALL r.D => ALL r.E + tmp = refine(description.getChild(0), maxLength-2, null, range); + + for(Description c : tmp) { + refinements.add(new ObjectAllRestriction(((ObjectQuantorRestriction)description).getRole(),c)); + } + + // rule 2: ALL r.D => ALL r.BOTTOM if D is a most specific atomic concept + if(description.getChild(0) instanceof NamedClass && tmp.size()==0) { + refinements.add(new ObjectAllRestriction(((ObjectQuantorRestriction)description).getRole(),new Nothing())); + } + + // rule 3: ALL r.D => ALL s.D or ALL r^-1.D => ALL s^-1.D + // currently inverse roles are not supported + ObjectProperty ar = (ObjectProperty) role; + Set<ObjectProperty> moreSpecialRoles = rs.getSubProperties(ar); + for(ObjectProperty moreSpecialRole : moreSpecialRoles) { + refinements.add(new ObjectAllRestriction(moreSpecialRole, description.getChild(0))); + } + + // rule 4: ALL r.D => <= (maxFillers-1) r.D + // (length increases by 1 so we have to check whether max length is sufficient) + // => commented out because this is acutally not a downward refinement + // if(useCardinalityRestrictions) { + // if(maxLength > description.getLength() && maxNrOfFillers.get(ar)>1) { + // ObjectMaxCardinalityRestriction max = new ObjectMaxCardinalityRestriction(maxNrOfFillers.get(ar)-1,role,description.getChild(0)); + // refinements.add(max); + // } + // } + } else if (description instanceof ObjectCardinalityRestriction) { + ObjectPropertyExpression role = ((ObjectCardinalityRestriction)description).getRole(); + Description range = opRanges.get(role); + int number = ((ObjectCardinalityRestriction)description).getCardinality(); + if(description instanceof ObjectMaxCardinalityRestriction) { + // rule 1: <= x r.C => 
<= x r.D + tmp = refine(description.getChild(0), maxLength-3, null, range); + + for(Description d : tmp) { + refinements.add(new ObjectMaxCardinalityRestriction(number,role,d)); + } + + // rule 2: <= x r.C => <= (x-1) r.C + ObjectMaxCardinalityRestriction max = (ObjectMaxCardinalityRestriction) description; + // int number = max.getNumber(); + if(number > 1) + refinements.add(new ObjectMaxCardinalityRestriction(number-1,max.getRole(),max.getChild(0))); + + } else if(description instanceof ObjectMinCardinalityRestriction) { + tmp = refine(description.getChild(0), maxLength-3, null, range); + + for(Description d : tmp) { + refinements.add(new ObjectMinCardinalityRestriction(number,role,d)); + } + + // >= x r.C => >= (x+1) r.C + ObjectMinCardinalityRestriction min = (ObjectMinCardinalityRestriction) description; + // int number = min.getNumber(); + if(number < maxNrOfFillers.get(min.getRole())) + refinements.add(new ObjectMinCardinalityRestriction(number+1,min.getRole(),min.getChild(0))); + } + } else if (description instanceof DatatypeSomeRestriction) { + + DatatypeSomeRestriction dsr = (DatatypeSomeRestriction) description; + DatatypeProperty dp = (DatatypeProperty) dsr.getRestrictedPropertyExpression(); + DataRange dr = dsr.getDataRange(); + if(dr instanceof DoubleMaxValue) { + double value = ((DoubleMaxValue)dr).getValue(); + // find out which split value was used + int splitIndex = splits.get(dp).lastIndexOf(value); + if(splitIndex == -1) + throw new Error("split error"); + int newSplitIndex = splitIndex - 1; + if(newSplitIndex >= 0) { + DoubleMaxValue max = new DoubleMaxValue(splits.get(dp).get(newSplitIndex)); + DatatypeSomeRestriction newDSR = new DatatypeSomeRestriction(dp,max); + refinements.add(newDSR); + // System.out.println(description + " => " + newDSR); + } + } else if(dr instanceof DoubleMinValue) { + double value = ((DoubleMinValue)dr).getValue(); + // find out which split value was used + int splitIndex = splits.get(dp).lastIndexOf(value); + 
if(splitIndex == -1) + throw new Error("split error"); + int newSplitIndex = splitIndex + 1; + if(newSplitIndex < splits.get(dp).size()) { + DoubleMinValue min = new DoubleMinValue(splits.get(dp).get(newSplitIndex)); + DatatypeSomeRestriction newDSR = new DatatypeSomeRestriction(dp,min); + refinements.add(newDSR); + } + } + } else if (description instanceof StringValueRestriction) { + StringValueRestriction svr = (StringValueRestriction) description; + DatatypeProperty dp = svr.getRestrictedPropertyExpression(); + Set<DatatypeProperty> subDPs = rs.getSubProperties(dp); + for(DatatypeProperty subDP : subDPs) { + refinements.add(new StringValueRestriction(subDP, svr.getStringValue())); + } + } + + // if a refinement is not Bottom, Top, ALL r.Bottom a refinement of top can be appended + if(!(description instanceof Thing) && !(description instanceof Nothing) + && !(description instanceof ObjectAllRestriction && description.getChild(0) instanceof Nothing)) { + // -1 because of the AND symbol which is appended + int topRefLength = maxLength - description.getLength() - 1; + + // maybe we have to compute new top refinements here + if(currDomain instanceof Thing) { + if(topRefLength > topRefinementsLength) + computeTopRefinements(topRefLength); + } else if(topRefLength > topARefinementsLength.get(currDomain)) + computeTopRefinements(topRefLength,(NamedClass)currDomain); + + if(topRefLength>0) { + Set<Description> topRefs; + if(currDomain instanceof Thing) + topRefs = topRefinementsCumulative.get(topRefLength); + else + topRefs = topARefinementsCumulative.get(currDomain).get(topRefLength); + + for(Description c : topRefs) { + // true if refinement should be skipped due to filters, + // false otherwise + boolean skip = false; + + // if a refinement of of the form ALL r, we check whether ALL r + // does not occur already + if(applyAllFilter) { + if(c instanceof ObjectAllRestriction) { + for(Description child : description.getChildren()) { + if(child instanceof 
ObjectAllRestriction) { + ObjectPropertyExpression r1 = ((ObjectAllRestriction)c).getRole(); + ObjectPropertyExpression r2 = ((ObjectAllRestriction)child).getRole(); + if(r1.toString().equals(r2.toString())) + skip = true; + } + } + } + } + + // check for double datatype properties + /* + if(c instanceof DatatypeSomeRestriction && + description instanceof DatatypeSomeRestriction) { + DataRange dr = ((DatatypeSomeRestriction)c).getDataRange(); + DataRange dr2 = ((DatatypeSomeRestriction)description).getDataRange(); + // it does not make sense to have statements like height >= 1.8 AND height >= 1.7 + if((dr instanceof DoubleMaxValue && dr2 instanceof DoubleMaxValue) + ||(dr instanceof DoubleMinValue && dr2 instanceof DoubleMinValue)) + skip = true; + }*/ + + // perform a disjointness check when named classes are added; + // this can avoid a lot of superfluous computation in the algorithm e.g. + // when A1 looks good, so many refinements of the form (A1 OR (A2 AND A3)) + // are generated which are all equal to A1 due to disjointness of A2 and A3 + if(disjointChecks && c instanceof NamedClass && description instanceof NamedClass && isDisjoint(description, c)) { + skip = true; + // System.out.println(c + " ignored when refining " + description); + } + + if(!skip) { + Intersection mc = new Intersection(); + mc.addChild(description); + mc.addChild(c); + + // clean and transform to ordered negation normal form + ConceptTransformation.cleanConceptNonRecursive(mc); + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(mc, conceptComparator); + + // last check before intersection is added + if(checkIntersection(mc)) + refinements.add(mc); + } + } + } + } + + // for(Description refinement : refinements) { + // if((refinement instanceof Intersection || refinement instanceof Union) && refinement.getChildren().size()<2) { + // System.out.println(description + " " + refinement + " " + currDomain + " " + maxLength); + // System.exit(0); + // } + // } + + + + 
return refinements; + } + + // when a child of an intersection is refined and reintegrated into the + // intersection, we can perform some sanity checks; + // method returns true if everything is OK and false otherwise + // TODO: can be implemented more efficiently if the newly added child + // is given as parameter + public static boolean checkIntersection(Intersection intersection) { + // rule 1: max. restrictions at most once + boolean maxDoubleOccurence = false; + // rule 2: min restrictions at most once + boolean minDoubleOccurence = false; + // rule 3: no double occurences of boolean datatypes + TreeSet<DatatypeProperty> occuredDP = new TreeSet<DatatypeProperty>(); + // rule 4: no double occurences of hasValue restrictions + TreeSet<ObjectProperty> occuredVR = new TreeSet<ObjectProperty>(); + + for(Description child : intersection.getChildren()) { + if(child instanceof DatatypeSomeRestriction) { + DataRange dr = ((DatatypeSomeRestriction)child).getDataRange(); + if(dr instanceof DoubleMaxValue) { + if(maxDoubleOccurence) + return false; + else + maxDoubleOccurence = true; + } else if(dr instanceof DoubleMinValue) { + if(minDoubleOccurence) + return false; + else + minDoubleOccurence = true; + } + } else if(child instanceof BooleanValueRestriction) { + DatatypeProperty dp = (DatatypeProperty) ((BooleanValueRestriction)child).getRestrictedPropertyExpression(); + // System.out.println("dp: " + dp); + // return false if the boolean property exists already + if(!occuredDP.add(dp)) + return false; + } else if(child instanceof ObjectValueRestriction) { + ObjectProperty op = (ObjectProperty) ((ObjectValueRestriction)child).getRestrictedPropertyExpression(); + if(!occuredVR.add(op)) + return false; + } + // System.out.println(child.getClass()); + } + return true; + } + + /** + * By default, the operator does not specialize e.g. (A or B) to A, because + * it only guarantees weak completeness. Under certain circumstances, e.g. 
+ * refinement of a fixed given concept, it can be useful to allow such + * refinements, which can be done by passing the parameter true to this method. + * @param dropDisjuncts Whether to remove disjuncts in refinement process. + */ + public void setDropDisjuncts(boolean dropDisjuncts) { + this.dropDisjuncts = dropDisjuncts; + } + + private void computeTopRefinements(int maxLength) { + computeTopRefinements(maxLength, null); + } + + private void computeTopRefinements(int maxLength, NamedClass domain) { + long topComputationTimeStartNs = System.nanoTime(); + + if(domain == null && m.size() == 0) + computeM(); + + if(domain != null && !mA.containsKey(domain)) + computeM(domain); + + int refinementsLength; + + if(domain == null) { + refinementsLength = topRefinementsLength; + } else { + if(!topARefinementsLength.containsKey(domain)) + topARefinementsLength.put(domain,0); + + refinementsLength = topARefinementsLength.get(domain); + } + + + // compute all possible combinations of the disjunction + for(int i = refinementsLength+1; i <= maxLength; i++) { + combos.put(i,MathOperations.getCombos(i, mMaxLength)); + + // initialise the refinements with empty sets + if(domain == null) { + topRefinements.put(i, new TreeSet<Description>(conceptComparator)); + } else { + if(!topARefinements.containsKey(domain)) + topARefinements.put(domain, new TreeMap<Integer,SortedSet<Description>>()); + topARefinements.get(domain).put(i, new TreeSet<Description>(conceptComparator)); + } + + for(List<Integer> combo : combos.get(i)) { + + // combination is a single number => try to use M + if(combo.size()==1) { + // note we cannot use "put" instead of "addAll" because there + // can be several combos for one length + if(domain == null) + topRefinements.get(i).addAll(m.get(i)); + else + topARefinements.get(domain).get(i).addAll(mA.get(domain).get(i)); + // combinations has several numbers => generate disjunct + } else if (isDisjunctionAllowed) { + + // check whether the combination makes sense, 
i.e. whether + // all lengths mentioned in it have corresponding elements + // e.g. when negation is deactivated there won't be elements of + // length 2 in M + boolean validCombo = true; + for(Integer j : combo) { + if((domain == null && m.get(j).size()==0) || + (domain != null && mA.get(domain).get(j).size()==0)) + validCombo = false; + } + + + if(validCombo) { + + SortedSet<Union> baseSet = new TreeSet<Union>(conceptComparator); + for(Integer j : combo) { + if(domain == null) + baseSet = MathOperations.incCrossProduct(baseSet, m.get(j)); + else + baseSet = MathOperations.incCrossProduct(baseSet, mA.get(domain).get(j)); + } + + // convert all concepts in ordered negation normal form + for(Description concept : baseSet) { + ConceptTransformation.transformToOrderedForm(concept, conceptComparator); + } + + // apply the exists filter (throwing out all refinements with + // double \exists r for any r) + // TODO: similar filtering can be done for boolean datatype + // properties + if(applyExistsFilter) { + Iterator<Union> it = baseSet.iterator(); + while(it.hasNext()) { + if(MathOperations.containsDoubleObjectSomeRestriction(it.next())) + it.remove(); + } + } + + // add computed refinements + if(domain == null) + topRefinements.get(i).addAll(baseSet); + else + topARefinements.get(domain).get(i).addAll(baseSet); + + } + } + } //for loop + + // create cumulative versions of refinements such that they can + // be accessed easily + TreeSet<Description> cumulativeRefinements = new TreeSet<Description>(conceptComparator); + for(int j=1; j<=i; j++) { + if(domain == null) { + cumulativeRefinements.addAll(topRefinements.get(j)); + } else { + cumulativeRefinements.addAll(topARefinements.get(domain).get(j)); + } + } + + if(domain == null) { + topRefinementsCumulative.put(i, cumulativeRefinements); + } else { + if(!topARefinementsCumulative.containsKey(domain)) + topARefinementsCumulative.put(domain, new TreeMap<Integer, TreeSet<Description>>()); + 
topARefinementsCumulative.get(domain).put(i, cumulativeRefinements); + } + } //for loop for disjunction + + + // register new top refinements length + if(domain == null) + topRefinementsLength = maxLength; + else + topARefinementsLength.put(domain,maxLength); + + topComputationTimeNs += System.nanoTime() - topComputationTimeStartNs; + } + + // compute M_\top + private void computeM() { + long mComputationTimeStartNs = System.nanoTime(); + + // initialise all possible lengths (1 to 3) //actran: 3 or 4? + for(int i=1; i<=mMaxLength; i++) { + m.put(i, new TreeSet<Description>(conceptComparator)); + } + + + //actran: computes the most general atomic concepts (subclasses of Thing) + SortedSet<Description> m1 = subHierarchy.getSubClasses(new Thing()); + m.put(1,m1); + + + //actran: computes negated most specific atomic concepts + SortedSet<Description> m2 = new TreeSet<Description>(conceptComparator); + if(useNegation) { + Set<Description> m2tmp = subHierarchy.getSuperClasses(new Nothing()); + for(Description c : m2tmp) { + if(!(c instanceof Thing)) { + m2.add(new Negation(c)); + } + } + } + + // boolean datatypes, e.g. 
testPositive = true + if(useBooleanDatatypes) { + Set<DatatypeProperty> booleanDPs = rs.getBooleanDatatypeProperties(); + for(DatatypeProperty dp : booleanDPs) { + m2.add(new BooleanValueRestriction(dp,true)); + m2.add(new BooleanValueRestriction(dp,false)); + } + } + m.put(2,m2); + + SortedSet<Description> m3 = new TreeSet<Description>(conceptComparator); + if(useExistsConstructor) { + // only uses most general roles + for(ObjectProperty r : rs.getMostGeneralProperties()) { + m3.add(new ObjectSomeRestriction(r, new Thing())); + } + } + + if(useAllConstructor) { + // we allow \forall r.\top here because otherwise the operator + // becomes too difficult to manage due to dependencies between + // M_A and M_A' where A'=ran(r) + for(ObjectProperty r : rs.getMostGeneralProperties()) { + m3.add(new ObjectAllRestriction(r, new Thing())); + } + } + + if(useDoubleDatatypes) { + Set<DatatypeProperty> doubleDPs = rs.getDoubleDatatypeProperties(); + for(DatatypeProperty dp : doubleDPs) { + if ((splits.get(dp) != null) && (splits.get(dp).size() > 0)) { + DoubleMaxValue max = new DoubleMaxValue(splits.get(dp).get(splits.get(dp).size()-1)); + DoubleMinValue min = new DoubleMinValue(splits.get(dp).get(0)); + m3.add(new DatatypeSomeRestriction(dp,max)); + m3.add(new DatatypeSomeRestriction(dp,min)); + } + } + } + + if(useDataHasValueConstructor) { + Set<DatatypeProperty> stringDPs = rs.getStringDatatypeProperties(); + for(DatatypeProperty dp : stringDPs) { + // loop over frequent values + Set<Constant> freqValues = frequentDataValues.get(dp); + for(Constant c : freqValues) { + m3.add(new StringValueRestriction(dp, c.getLiteral())); + } + } + } + + m.put(3,m3); + + SortedSet<Description> m4 = new TreeSet<Description>(conceptComparator); + if(useCardinalityRestrictions) { + for(ObjectProperty r : rs.getMostGeneralProperties()) { + int maxFillers = maxNrOfFillers.get(r); + // zero fillers: <= -1 r.C does not make sense + // one filler: <= 0 r.C is equivalent to NOT EXISTS r.C, + // 
but we still keep it, because ALL r.NOT C may be difficult to reach + if(maxFillers > 0) + m4.add(new ObjectMaxCardinalityRestriction(maxFillers-1, r, new Thing())); + } + } + m.put(4,m4); + + mComputationTimeNs += System.nanoTime() - mComputationTimeStartNs; + } + + // computation of the set M_A + // a major difference compared to the ILP 2007 \rho operator is that + // M is finite and contains elements of length (currently) at most 3 + private void computeM(NamedClass nc) { + long mComputationTimeStartNs = System.nanoTime(); + + // System.out.println(nc); + + mA.put(nc, new TreeMap<Integer,SortedSet<Description>>()); + // initialise all possible lengths (1 to 3) + for(int i=1; i<=mMaxLength; i++) { + mA.get(nc).put(i, new TreeSet<Description>(conceptComparator)); + } + + // incomplete, prior implementation + // SortedSet<Description> m1 = subHierarchy.getSubClasses(nc); + // mA.get(nc).put(1,m1); + + // most general classes, which are not disjoint with nc and provide real refinement + SortedSet<Description> m1 = getClassCandidates(nc); + mA.get(nc).put(1,m1); + + // most specific negated classes, which are not disjoint with nc + SortedSet<Description> m2 = new TreeSet<Description>(); + if(useNegation) { + m2 = getNegClassCandidates(nc); + mA.get(nc).put(2,m2); + } + + // System.out.println("m1 " + "(" + nc + "): " + m1); + // System.out.println("m2 " + "(" + nc + "): " + m2); + + /* + SortedSet<Description> m2 = new TreeSet<Description>(conceptComparator); + if(useNegation) { + // the definition in the paper is more complex, but actually + // we only have to insert the most specific concepts satisfying + // the mentioned restrictions; there is no need to implement a + // recursive method because for A subClassOf A' we have not A' + // subClassOf A and thus: if A and B are disjoint then also A' + // and B; if not A AND B = B then also not A' AND B = B + // 2010/03: the latter is not correct => a recursive method is needed + SortedSet<Description> m2tmp = 
subHierarchy.getSuperClasses(new Nothing()); + + for(Description c : m2tmp) { +// if(c instanceof Thing) +// m2.add(c); +// else { + // we obviously do not add \top (\top refines \top does not make sense) + if(!(c instanceof Thing)) { + NamedClass a = (NamedClass) c; + if(!isNotADisjoint(a, nc) && isNotAMeaningful(a, nc)) + m2.add(new Negation(a)); + } + } + } + */ + + // compute applicable properties + computeMg(nc); + + // boolean datatypes, e.g. testPositive = true + if(useBooleanDatatypes) { + Set<DatatypeProperty> booleanDPs = mgbd.get(nc); + for(DatatypeProperty dp : booleanDPs) { + m2.add(new BooleanValueRestriction(dp,true)); + m2.add(new BooleanValueRestriction(dp,false)); + } + } + + mA.get(nc).put(2,m2); + + SortedSet<Description> m3 = new TreeSet<Description>(conceptComparator); + if(useExistsConstructor) { + for(ObjectProperty r : mgr.get(nc)) { + m3.add(new ObjectSomeRestriction(r, new Thing())); + } + } + + if(useAllConstructor) { + // we allow \forall r.\top here because otherwise the operator + // becomes too difficult to manage due to dependencies between + // M_A and M_A' where A'=ran(r) + for(ObjectProperty r : mgr.get(nc)) { + m3.add(new ObjectAllRestriction(r, new Thing())); + } + } + + if(useDoubleDatatypes) { + Set<DatatypeProperty> doubleDPs = mgdd.get(nc); + // System.out.println("cached disjoints " + cachedDisjoints); + // System.out.println("appOP " + appOP); + // System.out.println("appBD " + appBD); + // System.out.println("appDD " + appDD); + // System.out.println("mgr " + mgr); + // System.out.println("mgbd " + mgbd); + // System.out.println("mgdd " + mgdd); + + for(DatatypeProperty dp : doubleDPs) { + if((splits.get(dp) != null) && (splits.get(dp).size() > 0)) { + DoubleMaxValue max = new DoubleMaxValue(splits.get(dp).get(splits.get(dp).size()-1)); + DoubleMinValue min = new DoubleMinValue(splits.get(dp).get(0)); + m3.add(new DatatypeSomeRestriction(dp,max)); + m3.add(new DatatypeSomeRestriction(dp,min)); + } + } + } + + 
if(useDataHasValueConstructor) { + Set<DatatypeProperty> stringDPs = mgsd.get(nc); + for(DatatypeProperty dp : stringDPs) { + // loop over frequent values + Set<Constant> freqValues = frequentDataValues.get(dp); + for(Constant c : freqValues) { + m3.add(new StringValueRestriction(dp, c.getLiteral())); + } + } + } + + mA.get(nc).put(3,m3); + + SortedSet<Description> m4 = new TreeSet<Description>(conceptComparator); + if(useCardinalityRestrictions) { + for(ObjectProperty r : mgr.get(nc)) { + int maxFillers = maxNrOfFillers.get(r); + // zero fillers: <= -1 r.C does not make sense + // one filler: <= 0 r.C is equivalent to NOT EXISTS r.C, + // but we still keep it, because ALL r.NOT C may be difficult to reach + if(maxFillers > 0) + m4.add(new ObjectMaxCardinalityRestriction(maxFillers-1, r, new Thing())); + } + } + mA.get(nc).put(4,m4); + + // System.out.println(mA.get(nc)); + + mComputationTimeNs += System.nanoTime() - mComputationTimeStartNs; + } + + // get candidates for a refinement of \top restricted to a class B + public SortedSet<Description> getClassCandidates(NamedClass index) { + return getClassCandidatesRecursive(index, Thing.instance); + } + + private SortedSet<Description> getClassCandidatesRecursive(Description index, Description upperClass) { + SortedSet<Description> candidates = new TreeSet<Description>(); + // System.out.println("index " + index + " upper class " + upperClass); + + // we descend the subsumption hierarchy to ensure that we get + // the most general concepts satisfying the criteria + for(Description candidate : subHierarchy.getSubClasses(upperClass)) { + // System.out.println("testing " + candidate + " ... "); + + // NamedClass candidate = (NamedClass) d; + // check disjointness with index (if not no further traversal downwards is necessary) + if(!isDisjoint(candidate,index)) { + // System.out.println( " passed disjointness test ... "); + // check whether the class is meaningful, i.e. 
adds something to the index + // to do this, we need to make sure that the class is not a superclass of the + // index (otherwise we get nothing new) - for instance based disjoints, we + // make sure that there is at least one individual, which is not already in the + // upper class + boolean meaningful; + if(instanceBasedDisjoints) { + // bug: tests should be performed against the index, not the upper class + // SortedSet<Individual> tmp = rs.getIndividuals(upperClass); + SortedSet<Individual> tmp = rs.getIndividuals(index); + tmp.removeAll(rs.getIndividuals(candidate)); + // System.out.println(" instances of " + index + " and not " + candidate + ": " + tmp.size()); + meaningful = tmp.size() != 0; + } else { + meaningful = !isDisjoint(new Negation(candidate),index); + } + + if(meaningful) { + // candidate went successfully through all checks + candidates.add(candidate); + // System.out.println(" real refinement"); + } else { + // descend subsumption hierarchy to find candidates + // System.out.println(" enter recursion"); + candidates.addAll(getClassCandidatesRecursive(index, candidate)); + } + } + // else { + // System.out.println(" ruled out, because it is disjoint"); + // } + } + // System.out.println("cc method exit"); + return candidates; + } + + // get candidates for a refinement of \top restricted to a class B + public SortedSet<Description> getNegClassCandidates(NamedClass index) { + return getNegClassCandidatesRecursive(index, Nothing.instance); + } + + private SortedSet<Description> getNegClassCandidatesRecursive(Description index, Description lowerClass) { + SortedSet<Description> candidates = new TreeSet<Description>(conceptComparator); + // System.out.println("index " + index + " lower class " + lowerClass); + + for(Description candidate : subHierarchy.getSuperClasses(lowerClass)) { + if(!(candidate instanceof Thing)) { + // System.out.println("candidate: " + candidate); + // check disjointness with index/range (should not be disjoint otherwise not 
useful) + if(!isDisjoint(new Negation(candidate),index)) { + boolean meaningful; + // System.out.println("not disjoint"); + if(instanceBasedDisjoints) { + SortedSet<Individual> tmp = rs.getIndividuals(index); + tmp.removeAll(rs.getIndividuals(new Negation(candidate))); + meaningful = tmp.size() != 0; + // System.out.println("instances " + tmp.size()); + } else { + meaningful = !isDisjoint(candidate,index); + } + + if(meaningful) { + candidates.add(new Negation(candidate)); + } else { + candidates.addAll(getNegClassCandidatesRecursive(index, candidate)); + } + } + } + } + return candidates; + } + + private void computeMg(NamedClass domain) { + // compute the applicable properties if this has not been done yet + if(appOP.get(domain) == null) + computeApp(domain); + + // initialise mgr, mgbd, mgdd, mgsd + mgr.put(domain, new TreeSet<ObjectProperty>()); + mgbd.put(domain, new TreeSet<DatatypeProperty>()); + mgdd.put(domain, new TreeSet<DatatypeProperty>()); + mgsd.put(domain, new TreeSet<DatatypeProperty>()); + + SortedSet<ObjectProperty> mostGeneral = rs.getMostGeneralProperties(); + computeMgrRecursive(domain, mostGeneral, mgr.get(domain)); + SortedSet<DatatypeProperty> mostGeneralDP = rs.getMostGeneralDatatypeProperties(); + // we make the (reasonable) assumption here that all sub and super + // datatype properties have the same type (e.g. 
boolean, integer, double) + Set<DatatypeProperty> mostGeneralBDP = Helper.intersection(mostGeneralDP, rs.getBooleanDatatypeProperties()); + Set<DatatypeProperty> mostGeneralDDP = Helper.intersection(mostGeneralDP, rs.getDoubleDatatypeProperties()); + Set<DatatypeProperty> mostGeneralSDP = Helper.intersection(mostGeneralDP, rs.getStringDatatypeProperties()); + computeMgbdRecursive(domain, mostGeneralBDP, mgbd.get(domain)); + computeMgddRecursive(domain, mostGeneralDDP, mgdd.get(domain)); + computeMgsdRecursive(domain, mostGeneralSDP, mgsd.get(domain)); + } + + private void computeMgrRecursive(NamedClass domain, Set<ObjectProperty> currProperties, Set<ObjectProperty> mgrTmp) { + for(ObjectProperty prop : currProperties) { + if(appOP.get(domain).contains(prop)) + mgrTmp.add(prop); + else + computeMgrRecursive(domain, rs.getSubProperties(prop), mgrTmp); + } + } + + private void computeMgbdRecursive(NamedClass domain, Set<DatatypeProperty> currProperties, Set<DatatypeProperty> mgbdTmp) { + for(DatatypeProperty prop : currProperties) { + if(appBD.get(domain).contains(prop)) + mgbdTmp.add(prop); + else + computeMgbdRecursive(domain, rs.getSubProperties(prop), mgbdTmp); + } + } + + private void computeMgddRecursive(NamedClass domain, Set<DatatypeProperty> currProperties, Set<DatatypeProperty> mgddTmp) { + for(DatatypeProperty prop : currProperties) { + if(appDD.get(domain).contains(prop)) + mgddTmp.add(prop); + else + computeMgddRecursive(domain, rs.getSubProperties(prop), mgddTmp); + } + } + + private void computeMgsdRecursive(NamedClass domain, Set<DatatypeProperty> currProperties, Set<DatatypeProperty> mgsdTmp) { + for(DatatypeProperty prop : currProperties) { + if(appDD.get(domain).contains(prop)) + mgsdTmp.add(prop); + else + computeMgsdRecursive(domain, rs.getSubProperties(prop), mgsdTmp); + } + } + + // computes the set of applicable properties for a given class + private void computeApp(NamedClass domain) { + // object properties + Set<ObjectProperty> mostGeneral = 
rs.getObjectProperties(); + Set<ObjectProperty> applicableRoles = new TreeSet<ObjectProperty>(); + for(ObjectProperty role : mostGeneral) { + // TODO: currently we just rely on named classes as roles, + // instead of computing dom(r) and ran(r) + Description d = rs.getDomain(role); + if(!isDisjoint(domain,d)) + applicableRoles.add(role); + } + appOP.put(domain, applicableRoles); + + // boolean datatype properties + Set<DatatypeProperty> mostGeneralBDPs = rs.getBooleanDatatypeProperties(); + Set<DatatypeProperty> applicableBDPs = new TreeSet<DatatypeProperty>(); + for(DatatypeProperty role : mostGeneralBDPs) { + // Description d = (NamedClass) rs.getDomain(role); + Description d = rs.getDomain(role); + if(!isDisjoint(domain,d)) + applicableBDPs.add(role); + } + appBD.put(domain, applicableBDPs); + + // double datatype properties + Set<DatatypeProperty> mostGeneralDDPs = rs.getDoubleDatatypeProperties(); + Set<DatatypeProperty> applicableDDPs = new TreeSet<DatatypeProperty>(); + for(DatatypeProperty role : mostGeneralDDPs) { + // Description d = (NamedClass) rs.getDomain(role); + Description d = rs.getDomain(role); + // System.out.println("domain: " + d); + if(!isDisjoint(domain,d)) + applicableDDPs.add(role); + } + appDD.put(domain, applicableDDPs); + } + + // returns true of the intersection contains elements disjoint + // to the given description (if true adding the description to + // the intersection results in a description equivalent to bottom) + // e.g. 
OldPerson AND YoungPerson; Nitrogen-34 AND Tin-113 + // Note: currently we only check named classes in the intersection, + // it would be interesting to see whether it makes sense to extend this + // (advantage: less refinements, drawback: operator will need infinitely many + // reasoner queries in the long run) + @SuppressWarnings({"unused"}) + private boolean containsDisjoints(Intersection intersection, Description d) { + List<Description> children = intersection.getChildren(); + for(Description child : children) { + if(d instanceof Nothing) + return true; + else if(child instanceof NamedClass) { + if(isDisjoint((NamedClass)child, d)) + return true; + } + } + return false; + } + + private boolean isDisjoint(Description d1, Description d2) { + + // System.out.println("| " + d1 + " " + d2); + // System.out.println("| " + cachedDisjoints); + + // check whether we have cached this query + Map<Description,Boolean> tmp = cachedDisjoints.get(d1); + Boolean tmp2 = null; + if(tmp != null) + tmp2 = tmp.get(d2); + + // System.out.println("| " + tmp + " " + tmp2); + + if(tmp2==null) { + Boolean result; + if(instanceBasedDisjoints) { + result = isDisjointInstanceBased(d1,d2); + } else { + Description d = new Intersection(d1, d2); + result = rs.isSuperClassOf(new Nothing(), d); + } + // add the result to the cache (we add it twice such that + // the order of access does not matter) + + // System.out.println("| result: " + result); + + // create new entries if necessary + Map<Description,Boolean> map1 = new TreeMap<Description,Boolean>(conceptComparator); + Map<Description,Boolean> map2 = new TreeMap<Description,Boolean>(conceptComparator); + if(tmp == null) + cachedDisjoints.put(d1, map1); + if(!cachedDisjoints.containsKey(d2)) + cachedDisjoints.put(d2, map2); + + // add result symmetrically in the description matrix + cachedDisjoints.get(d1).put(d2, result); + cachedDisjoints.get(d2).put(d1, result); + // System.out.println("---"); + return result; + } else { + // 
System.out.println("==="); + return tmp2; + } + } + + private boolean isDisjointInstanceBased(Description d1, Description d2) { + SortedSet<Individual> d1Instances = rs.getIndividuals(d1); + SortedSet<Individual> d2Instances = rs.getIndividuals(d2); + // System.out.println(d1 + " " + d2); + // System.out.println(d1 + " " + d1Instances); + // System.out.println(d2 + " " + d2Instances); + for(Individual d1Instance : d1Instances) { + if(d2Instances.contains(d1Instance)) + return false; + } + return true; + } + + /* + // computes whether two classes are disjoint; this should be computed + // by the reasoner on... [truncated message content] |
From: <lor...@us...> - 2012-05-10 13:31:36
|
Revision: 3701 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3701&view=rev Author: lorenz_b Date: 2012-05-10 13:31:25 +0000 (Thu, 10 May 2012) Log Message: ----------- Added preliminary support for FILTERs. Added LGG test for Oxford dataset. Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/QueryTree.java trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/impl/QueryTreeImpl.java trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/impl/QueryTreeFactoryImpl.java trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/operations/lgg/LGGGeneratorImpl.java trunk/components-ext/src/test/java/org/dllearner/algorithm/qtl/LGGTest.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/QueryTree.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/QueryTree.java 2012-05-10 07:08:30 UTC (rev 3700) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/QueryTree.java 2012-05-10 13:31:25 UTC (rev 3701) @@ -26,7 +26,9 @@ import org.dllearner.algorithm.qtl.datastructures.impl.QueryTreeImpl; +import com.hp.hpl.jena.datatypes.RDFDatatype; import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.rdf.model.Literal; /** * @@ -129,8 +131,14 @@ String toSPARQLQueryString(boolean filtered); + Query toSPARQLQuery(); + int getTriplePatternCount(); Query toQuery(); + RDFDatatype getDatatype(); + + List<Literal> getLiterals(); + } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/impl/QueryTreeImpl.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/impl/QueryTreeImpl.java 2012-05-10 07:08:30 UTC (rev 3700) +++ 
trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/datastructures/impl/QueryTreeImpl.java 2012-05-10 13:31:25 UTC (rev 3701) @@ -22,10 +22,13 @@ import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; +import java.util.Date; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -33,15 +36,21 @@ import java.util.TreeSet; import java.util.regex.Pattern; +import javax.xml.bind.DatatypeConverter; + import org.dllearner.algorithm.qtl.datastructures.NodeRenderer; import org.dllearner.algorithm.qtl.datastructures.QueryTree; import org.dllearner.algorithm.qtl.filters.Filters; import com.hp.hpl.jena.datatypes.BaseDatatype; +import com.hp.hpl.jena.datatypes.RDFDatatype; +import com.hp.hpl.jena.datatypes.xsd.XSDDatatype; import com.hp.hpl.jena.graph.Node; import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.Syntax; +import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.sparql.syntax.ElementGroup; import com.hp.hpl.jena.sparql.syntax.ElementTriplesBlock; @@ -72,6 +81,8 @@ private boolean isLiteralNode = false; private boolean isResourceNode = false; + private List<Literal> literals = new ArrayList<Literal>(); + public QueryTreeImpl(N userObject) { this.userObject = userObject; @@ -80,7 +91,13 @@ edge2ChildrenMap = new HashMap<String, List<QueryTree<N>>>(); toStringRenderer = new NodeRenderer<N>() { public String render(QueryTree<N> object) { - return object.toString() + "(" + object.getId() + ")"; + String label = object.toString() + "(" + object.getId() + ")"; + if(object.isLiteralNode()){ + if(!object.getLiterals().isEmpty()){ + label += "Values: " + object.getLiterals(); + } + } + return label; } }; } @@ -672,6 +689,10 @@ return true; } + 
@Override + public Query toSPARQLQuery() { + return QueryFactory.create(toSPARQLQueryString(), Syntax.syntaxARQ); + } @Override public String toSPARQLQueryString() { @@ -680,8 +701,12 @@ } cnt = 0; StringBuilder sb = new StringBuilder(); - sb.append("SELECT ?x0 WHERE {\n"); - buildSPARQLQueryString(this, sb, false); + sb.append("SELECT DISTINCT ?x0 WHERE {\n"); + List<String> filters = new ArrayList<String>(); + buildSPARQLQueryString(this, sb, false, filters); + for(String filter : filters){ + sb.append(filter).append("\n"); + } sb.append("}"); return sb.toString(); } @@ -693,16 +718,23 @@ } cnt = 0; StringBuilder sb = new StringBuilder(); - sb.append("SELECT ?x0 WHERE {\n"); - buildSPARQLQueryString(this, sb, filtered); + List<String> filters = new ArrayList<String>(); + sb.append("SELECT DISTINCT ?x0 WHERE {\n"); + buildSPARQLQueryString(this, sb, filtered, filters); + for(String filter : filters){ + sb.append(filter).append("\n"); + } sb.append("}"); return sb.toString(); } - private void buildSPARQLQueryString(QueryTree<N> tree, StringBuilder sb, boolean filtered){ + private void buildSPARQLQueryString(QueryTree<N> tree, StringBuilder sb, boolean filtered, List<String> filters){ Object subject = null; if(tree.getUserObject().equals("?")){ subject = "?x" + cnt++; + if(tree.isLiteralNode() && !tree.getLiterals().isEmpty()){ + filters.add(getFilter(subject.toString(), tree.getLiterals())); + } } else { subject = "<" + tree.getUserObject() + ">"; } @@ -725,12 +757,61 @@ } sb.append(subject).append(" <").append(predicate).append("> ").append(object).append(".\n"); if(!objectIsResource){ - buildSPARQLQueryString(child, sb, filtered); + buildSPARQLQueryString(child, sb, filtered, filters); } } } } + private String getFilter(String varName, List<Literal> literals){ + String filter = "FILTER("; + + Literal min = getMin(literals); + filter += varName + ">=\"" + min.getLexicalForm() + "\"^^<" + min.getDatatypeURI() + ">"; + + filter += " && "; + + Literal max = 
getMax(literals); + filter += varName + "<=\"" + max.getLexicalForm() + "\"^^<" + min.getDatatypeURI() + ">"; + + filter += ")"; + return filter; + } + + private Literal getMin(List<Literal> literals){ + Iterator<Literal> iter = literals.iterator(); + Literal min = iter.next(); + Literal l; + while(iter.hasNext()){ + l = iter.next(); + if(l.getDatatype() == XSDDatatype.XSDinteger){ + min = (l.getInt() < min.getInt()) ? l : min; + } else if(l.getDatatype() == XSDDatatype.XSDdouble){ + min = (l.getDouble() < min.getDouble()) ? l : min; + } else if(l.getDatatype() == XSDDatatype.XSDdate){ + min = (DatatypeConverter.parseDate(l.getLexicalForm()).compareTo(DatatypeConverter.parseDate(min.getLexicalForm())) == -1) ? l : min; + } + } + return min; + } + + private Literal getMax(List<Literal> literals){ + Iterator<Literal> iter = literals.iterator(); + Literal max = iter.next(); + Literal l; + while(iter.hasNext()){ + l = iter.next(); + if(l.getDatatype() == XSDDatatype.XSDinteger){ + max = (l.getInt() > max.getInt()) ? l : max; + } else if(l.getDatatype() == XSDDatatype.XSDdouble){ + max = (l.getDouble() > max.getDouble()) ? l : max; + } else if(l.getDatatype() == XSDDatatype.XSDdate){ + max = (DatatypeConverter.parseDate(l.getLexicalForm()).compareTo(DatatypeConverter.parseDate(max.getLexicalForm())) == 1) ? 
l : max; + } + } + return max; + } + public Query toQuery(){ Query query = QueryFactory.make(); query.setQuerySelectType(); @@ -797,5 +878,29 @@ return triples; } + public void addLiteral(Literal l){ + literals.add(l); + } + + public List<Literal> getLiterals() { + return literals; + } + + public void addLiterals(Collection<Literal> literals) { + this.literals.addAll(literals); + } + + public RDFDatatype getDatatype(){ + if(isLiteralNode){ + if(!literals.isEmpty()){ + return literals.get(0).getDatatype(); + } else { + return null; + } + } else { + throw new UnsupportedOperationException("Node ist not a literal"); + } + } + } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/impl/QueryTreeFactoryImpl.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/impl/QueryTreeFactoryImpl.java 2012-05-10 07:08:30 UTC (rev 3700) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/impl/QueryTreeFactoryImpl.java 2012-05-10 13:31:25 UTC (rev 3701) @@ -35,6 +35,8 @@ import org.dllearner.algorithm.qtl.filters.QuestionBasedStatementFilter; import org.dllearner.algorithm.qtl.filters.ZeroFilter; +import com.hp.hpl.jena.datatypes.RDFDatatype; +import com.hp.hpl.jena.datatypes.xsd.XSDDatatype; import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.Property; @@ -60,6 +62,8 @@ private Selector statementSelector = new SimpleSelector(); private com.hp.hpl.jena.util.iterator.Filter<Statement> keepFilter; + private int maxDepth = 3; + public QueryTreeFactoryImpl(){ comparator = new StatementComparator(); predicateFilters = new HashSet<String>(Filters.getAllFilterProperties()); @@ -139,7 +143,8 @@ QueryTreeImpl<String> tree = new QueryTreeImpl<String>(s.toString()); - fillTree(tree, resource2Statements); + int depth = 0; + fillTree(tree, resource2Statements, depth); tree.setUserObject("?"); return tree; @@ -160,7 
+165,8 @@ fillMap(s, model, resource2Statements); QueryTreeImpl<String> tree = new QueryTreeImpl<String>(s.toString()); - fillTree(tree, resource2Statements); + int depth = 0; + fillTree(tree, resource2Statements, depth); tree.setUserObject("?"); return tree; @@ -178,7 +184,7 @@ resource2Statements.put(st.getSubject().toString(), statements); } statements.add(st); - if(st.getObject().isURIResource() && !resource2Statements.containsKey(st.getObject().asResource().getURI())){ + if((st.getObject().isResource()) && !resource2Statements.containsKey(st.getObject().toString())){ fillMap(st.getObject().asResource(), model, resource2Statements); } } @@ -201,54 +207,73 @@ statements.add(st); } QueryTreeImpl<String> tree = new QueryTreeImpl<String>(s.toString()); - fillTree(tree, resource2Statements); + int depth = 0; + fillTree(tree, resource2Statements, depth); tree.setUserObject("?"); return tree; } - private void fillTree(QueryTreeImpl<String> tree, SortedMap<String, SortedSet<Statement>> resource2Statements){ - tree.setId(nodeId++); - if(resource2Statements.containsKey(tree.getUserObject())){ - QueryTreeImpl<String> subTree; - Property predicate; - RDFNode object; - for(Statement st : resource2Statements.get(tree.getUserObject())){ - predicate = st.getPredicate(); - object = st.getObject(); - if(!predicateFilter.isRelevantResource(predicate.getURI())){ - continue; - } - if(predicateFilters.contains(st.getPredicate().toString())){ - continue; - } - if(object.isLiteral()){ - Literal lit = st.getLiteral(); - String escapedLit = lit.getLexicalForm().replace("\"", "\\\""); - StringBuilder sb = new StringBuilder(); - sb.append("\"").append(escapedLit).append("\""); - if(lit.getDatatypeURI() != null){ - sb.append("^^<").append(lit.getDatatypeURI()).append(">"); + private void fillTree(QueryTreeImpl<String> tree, SortedMap<String, SortedSet<Statement>> resource2Statements, int depth){ + depth++; + tree.setId(nodeId++); + if(resource2Statements.containsKey(tree.getUserObject())){ 
+ QueryTreeImpl<String> subTree; + Property predicate; + RDFNode object; + for(Statement st : resource2Statements.get(tree.getUserObject())){ + predicate = st.getPredicate(); + object = st.getObject(); + if(!predicateFilter.isRelevantResource(predicate.getURI())){ + continue; } - if(!lit.getLanguage().isEmpty()){ - sb.append("@").append(lit.getLanguage()); + if(predicateFilters.contains(st.getPredicate().toString())){ + continue; } - subTree = new QueryTreeImpl<String>(sb.toString()); -// subTree = new QueryTreeImpl<String>(lit.toString()); - subTree.setId(nodeId++); - subTree.setLiteralNode(true); - tree.addChild(subTree, st.getPredicate().toString()); - } else if(objectFilter.isRelevantResource(object.asResource().getURI())){ - if(tree.getUserObjectPathToRoot().size() < 3 && - !tree.getUserObjectPathToRoot().contains(st.getObject().toString())){ - subTree = new QueryTreeImpl<String>(st.getObject().toString()); - subTree.setResourceNode(true); + if(object.isLiteral()){ + Literal lit = st.getLiteral(); + String escapedLit = lit.getLexicalForm().replace("\"", "\\\""); + StringBuilder sb = new StringBuilder(); + sb.append("\"").append(escapedLit).append("\""); + if(lit.getDatatypeURI() != null){ + sb.append("^^<").append(lit.getDatatypeURI()).append(">"); + } + if(!lit.getLanguage().isEmpty()){ + sb.append("@").append(lit.getLanguage()); + } + subTree = new QueryTreeImpl<String>(sb.toString()); +// subTree = new QueryTreeImpl<String>(lit.toString()); + subTree.setId(nodeId++); + subTree.setLiteralNode(true); + if(lit.getDatatype() == XSDDatatype.XSDinteger || lit.getDatatype() == XSDDatatype.XSDdouble || lit.getDatatype() == XSDDatatype.XSDdate){ + subTree.addLiteral(lit); + } tree.addChild(subTree, st.getPredicate().toString()); - fillTree(subTree, resource2Statements); + } else if(objectFilter.isRelevantResource(object.asResource().getURI())){ + if(object.asResource().isAnon()){ + System.out.println(object); + } + 
if(!tree.getUserObjectPathToRoot().contains(st.getObject().toString())){ + subTree = new QueryTreeImpl<String>(st.getObject().toString()); + subTree.setResourceNode(true); + tree.addChild(subTree, st.getPredicate().toString()); + if(depth < maxDepth){ + fillTree(subTree, resource2Statements, depth); + } + + } + } else if(object.isAnon()){ + if(depth < maxDepth && + !tree.getUserObjectPathToRoot().contains(st.getObject().toString())){ + subTree = new QueryTreeImpl<String>(st.getObject().toString()); + subTree.setResourceNode(true); + tree.addChild(subTree, st.getPredicate().toString()); + fillTree(subTree, resource2Statements, depth); + } } } } - } + depth--; } class StatementComparator implements Comparator<Statement>{ Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/operations/lgg/LGGGeneratorImpl.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/operations/lgg/LGGGeneratorImpl.java 2012-05-10 07:08:30 UTC (rev 3700) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/qtl/operations/lgg/LGGGeneratorImpl.java 2012-05-10 13:31:25 UTC (rev 3701) @@ -29,6 +29,7 @@ import org.dllearner.algorithm.qtl.datastructures.QueryTree; import org.dllearner.algorithm.qtl.datastructures.impl.QueryTreeImpl; +import com.hp.hpl.jena.datatypes.RDFDatatype; import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; @@ -145,6 +146,15 @@ lgg.setUserObject((N)"?"); } + if(tree1.isLiteralNode() && tree2.isLiteralNode()){ + RDFDatatype d1 = tree1.getDatatype(); + RDFDatatype d2 = tree2.getDatatype(); + if(d1 != null && d2 != null && d1 == d2){ + ((QueryTreeImpl<N>)lgg).addLiterals(((QueryTreeImpl<N>)tree1).getLiterals()); + ((QueryTreeImpl<N>)lgg).addLiterals(((QueryTreeImpl<N>)tree2).getLiterals()); + } + } + Set<QueryTreeImpl<N>> addedChildren; QueryTreeImpl<N> lggChild; for(Object edge : new TreeSet<Object>(tree1.getEdges())){ Modified: 
trunk/components-ext/src/test/java/org/dllearner/algorithm/qtl/LGGTest.java =================================================================== --- trunk/components-ext/src/test/java/org/dllearner/algorithm/qtl/LGGTest.java 2012-05-10 07:08:30 UTC (rev 3700) +++ trunk/components-ext/src/test/java/org/dllearner/algorithm/qtl/LGGTest.java 2012-05-10 13:31:25 UTC (rev 3701) @@ -19,7 +19,11 @@ */ package org.dllearner.algorithm.qtl; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -39,14 +43,23 @@ import org.dllearner.algorithm.qtl.operations.lgg.LGGGeneratorImpl; import org.dllearner.algorithm.qtl.util.ModelGenerator; import org.dllearner.algorithm.qtl.util.ModelGenerator.Strategy; +import org.dllearner.kb.sparql.ConciseBoundedDescriptionGenerator; +import org.dllearner.kb.sparql.ConciseBoundedDescriptionGeneratorImpl; import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.kb.sparql.SparqlQuery; import org.junit.Assert; import org.junit.Test; +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecutionFactory; +import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.Statement; +import com.hp.hpl.jena.sparql.util.ModelUtils; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.RDF; import com.hp.hpl.jena.vocabulary.RDFS; @@ -61,6 +74,64 @@ private static final Logger logger = Logger.getLogger(LGGTest.class); +// @Test + public void testOxfordData(){ + Model model = ModelFactory.createOntologyModel(); + int depth = 3; + try { + model.read(new FileInputStream(new 
File("/home/lorenz/arbeit/papers/question-answering-iswc-2012/examples/ontology.ttl")), null, "TURTLE"); + System.out.println(model.size()); + model.read(new FileInputStream(new File("/home/lorenz/arbeit/papers/question-answering-iswc-2012/examples/finders.ttl")), "http://diadem.cs.ox.ac.uk/ontologies/real-estate#", "TURTLE"); + System.out.println(model.size()); +// model.read(new FileInputStream(new File("/home/lorenz/arbeit/papers/question-answering-iswc-2012/examples/martinco.ttl")), null, "TURTLE"); +// System.out.println(model.size()); +// model.write(new FileOutputStream(new File("/home/lorenz/arbeit/papers/question-answering-iswc-2012/examples/all.ttl")), "TURTLE", null); +// model.read(new FileInputStream(new File("/home/lorenz/arbeit/papers/question-answering-iswc-2012/examples/all.ttl")), null, "TURTLE"); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + +// for(Statement s : model.listStatements().toList()){ +// System.out.println(s); +// } +// +// ResultSet rs1 = QueryExecutionFactory.create("SELECT * WHERE {?s <http://diadem.cs.ox.ac.uk/ontologies/real-estate#rooms> ?o. 
?o ?p ?o1}", model).execSelect(); +// System.out.println(ResultSetFormatter.asText(rs1)); + + ConciseBoundedDescriptionGenerator cbd = new ConciseBoundedDescriptionGeneratorImpl(model); + QueryTreeFactory<String> qtf = new QueryTreeFactoryImpl(); + + List<String> posExamples = Arrays.asList("http://diadem.cs.ox.ac.uk/ontologies/real-estate#inst004", + "http://diadem.cs.ox.ac.uk/ontologies/real-estate#inst005"); + + List<QueryTree<String>> trees = new ArrayList<QueryTree<String>>(); + + //get the trees for the positive examples of depth 3 + QueryTree<String> tree; + for(String ex : posExamples){ + tree = qtf.getQueryTree(ex, cbd.getConciseBoundedDescription(ex, depth)); + trees.add(tree); + System.out.println(tree.getStringRepresentation()); + } + + //compute the LGG + LGGGenerator<String> lggGen = new LGGGeneratorImpl<String>(); + QueryTree<String> lgg = lggGen.getLGG(trees); + System.out.println("LGG:\n" + lgg.getStringRepresentation()); + Query q = lgg.toSPARQLQuery(); + System.out.println("Query:\n" + q); + + //run the SPARQL query against the data - should be return at least the positive examples + List<String> result = new ArrayList<String>(); + ResultSet rs = QueryExecutionFactory.create(q, model).execSelect(); + while(rs.hasNext()){ + result.add(rs.next().getResource("x0").getURI()); + } + System.out.println(result); + Assert.assertTrue(result.containsAll(posExamples)); + + } + @Test public void testLGGWithDBpediaExample(){ QueryTreeFactory<String> factory = new QueryTreeFactoryImpl(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <chr...@us...> - 2012-05-10 07:08:39
|
Revision: 3700 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3700&view=rev Author: christinaunger Date: 2012-05-10 07:08:30 +0000 (Thu, 10 May 2012) Log Message: ----------- [tbsl.exploration] repaired empty-property-template Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj 2012-05-09 15:34:39 UTC (rev 3699) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj 2012-05-10 07:08:30 UTC (rev 3700) @@ -502,7 +502,7 @@ //TOKEN: {<DR: (["?","!"])?(["a"-"z","A"-"Z","0"-"9","."])+>} TOKEN: {<A: (["a"-"z","A"-"Z","0"-"9"])+>} -TOKEN: {<B: (["a"-"z","A"-"Z","_",".","#","0"-"9"])+":"(["a"-"z","A"-"Z","_",".","#","0"-"9"])+>} +TOKEN: {<B: (["a"-"z","A"-"Z","_",".","#","0"-"9"])+":"(["a"-"z","A"-"Z","_",".","#","0"-"9"])+>} // oder eher: SLOT_([...])+ TOKEN: {<C: ["?","!"](["a"-"z","A"-"Z","0"-"9"])+>} Token dr() : { Token t; }{ (t=<A> | t=<C>) { return t; } } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java 2012-05-09 15:34:39 UTC (rev 3699) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java 2012-05-10 07:08:30 UTC (rev 3700) @@ -337,11 +337,11 @@ slot = "SLOT_" + token + "/PROPERTY/"; String[] npAdjunct = {token, "(NP NP* (PP P:'" + token.toLowerCase() + "' DP[pobj]))", - "<x,l1,<e,t>,[ l1:[ | SLOT_" + token + 
"(p), p(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + + "<x,l1,<e,t>,[ l1:[ | SLOT_" + token + "(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + " ;; <x,l1,<e,t>,[ l1:[ | empty(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],[]>"}; String[] vpAdjunct = {token, "(VP VP* (PP P:'" + token.toLowerCase() + "' DP[pobj]))", - "<x,l1,t,[ l1:[ | SLOT_" + token + "(p), p(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + + "<x,l1,t,[ l1:[ | SLOT_" + token + "(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + " ;; <x,l1,t,[ l1:[ | empty(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],[]>"}; result.add(npAdjunct); result.add(vpAdjunct); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <chr...@us...> - 2012-05-09 15:34:45
|
Revision: 3699 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3699&view=rev Author: christinaunger Date: 2012-05-09 15:34:39 +0000 (Wed, 09 May 2012) Log Message: ----------- [tbsl] changed regex that provoked NullPointerException Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java 2012-05-09 15:28:25 UTC (rev 3698) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java 2012-05-09 15:34:39 UTC (rev 3699) @@ -88,8 +88,8 @@ Pattern vpassinPattern = Pattern.compile("\\s((\\w+)/VPASS.\\w+/IN)"); Pattern gerundinPattern = Pattern.compile("\\s((\\w+)/((VBG)|(VBN)).\\w+/IN)"); Pattern vprepPattern = Pattern.compile("\\s((\\w+)(?<!have)/V[A-Z]+\\s\\w+/(IN|TO))"); - Pattern whenPattern = Pattern.compile("(?i)(when/WRB\\s(.+\\s)(\\w+)/((V[A-Z]+)|(PASS[A-Z]+)))"); - Pattern wherePattern = Pattern.compile("(?i)(where/WRB\\s(.+\\s)(\\w+)/((V[A-Z]+)|(PASS[A-Z]+)))"); + Pattern whenPattern = Pattern.compile("\\A(when/WRB\\s(.+\\s)(\\w+)/((V[A-Z]+)|(PASS[A-Z]+)))"); + Pattern wherePattern = Pattern.compile("\\A(where/WRB\\s(.+\\s)(\\w+)/((V[A-Z]+)|(PASS[A-Z]+)))"); Pattern adjsPattern = Pattern.compile("((\\w+)/JJ.(\\w+)/JJ)"); Pattern adjnounPattern = Pattern.compile("((\\w+)(?<!many)/JJ.(\\w+)/NN(S)?)"); Pattern adjnprepPattern = Pattern.compile("((\\w+)(?<!many)/JJ.(\\w+)/NPREP)"); @@ -195,7 +195,7 @@ } m = whenPattern.matcher(condensedstring); while (m.find()) { - if (m.group(5).equals("VPREP")) { + if (m.group(4).equals("VPREP")) { if (VERBOSE) logger.trace("Replacing " + m.group(1) + " by " + m.group(2)+m.group(3)+"/WHENPREP"); condensedstring = 
condensedstring.replaceFirst(m.group(1),m.group(2) + m.group(3)+"/WHENPREP"); } else { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <chr...@us...> - 2012-05-09 15:28:37
|
Revision: 3698 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3698&view=rev Author: christinaunger Date: 2012-05-09 15:28:25 +0000 (Wed, 09 May 2012) Log Message: ----------- [tbsl] repaired parser/templator shortcomings (and probably broke something else ;) Modified Paths: -------------- trunk/components-ext/pom.xml trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserConstants.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserTokenManager.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex Added Paths: ----------- trunk/components-ext/src/main/javacc/ trunk/components-ext/src/main/javacc/DRSParser.jj trunk/components-ext/src/main/javacc/DUDE_Parser.jj trunk/components-ext/src/main/javacc/LTAG_Parser.jj Modified: trunk/components-ext/pom.xml =================================================================== --- trunk/components-ext/pom.xml 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/pom.xml 2012-05-09 15:28:25 UTC (rev 3698) @@ -165,7 +165,22 @@ </dependencies> <build> <plugins> + <!-- <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>javacc-maven-plugin</artifactId> + <version>2.6</version> + <executions> + <execution> + <id>javacc</id> + <goals> + <goal>javacc</goal> + </goals> + </execution> + </executions> + </plugin> + --> + <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <configuration> Modified: 
trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java 2012-05-09 15:28:25 UTC (rev 3698) @@ -236,7 +236,7 @@ Pattern quotePattern2 = Pattern.compile("(``/``((.*)_)''/'')"); Pattern nnpPattern = Pattern.compile("\\s?((\\w+)/NNP[S]?\\s(\\w+))/NNP[S]?(\\W|$)"); Pattern nnPattern = Pattern.compile("\\s?((\\w+)/NN[S]?\\s(\\w+))/NN[S]?(\\W|$)"); - Pattern nnnnpPattern = Pattern.compile("\\s?((\\w+)/NNP[S]?)\\s(\\w+)/NN[S]?(\\W|$)"); + Pattern nnnnpPattern = Pattern.compile("\\s?((\\w+)/NNP[S]?\\s(\\w+)/NN[S]?)(\\W|$)"); m = quotePattern1.matcher(flat); while (m.find()) { @@ -265,7 +265,7 @@ } m = nnnnpPattern.matcher(flat); while (m.find()) { - flat = flat.replaceFirst(m.group(1),m.group(2) + "/JJ"); + flat = flat.replaceFirst(m.group(1),m.group(2) + "_" + m.group(3) + "/NNP" + m.group(4)); m = nnnnpPattern.matcher(flat); } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.java 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.java 2012-05-09 15:28:25 UTC (rev 3698) @@ -209,7 +209,7 @@ jj_consume_token(2); referent = dr(); jj_consume_token(2); - word = jj_consume_token(WORD); + word = word(); jj_consume_token(2); type = Type(); jj_consume_token(8); @@ -237,7 +237,7 @@ type.setResultType(result); {if (true) return type;} } else if (jj_2_15(2)) { - word = jj_consume_token(WORD); + word = word(); ElementaryType type=null; if 
(word.toString().equals("e")) type = new ElementaryType(ElemType.e); @@ -344,7 +344,7 @@ DRS drs1; DRS drs2; if (jj_2_29(2)) { - predicate = jj_consume_token(WORD); + predicate = word(); jj_consume_token(10); dr_list = DR_List(); jj_consume_token(8); @@ -476,9 +476,9 @@ Token type; SlotType slottype = null; List<String> words = null; - ref = jj_consume_token(WORD); + ref = word(); jj_consume_token(14); - type = jj_consume_token(WORD); + type = word(); jj_consume_token(14); if (jj_2_35(2)) { words = Word_List(); @@ -502,7 +502,7 @@ final public List<String> Word_List() throws ParseException { Token word; List<String> words = null; - word = jj_consume_token(WORD); + word = word(); if (jj_2_36(2)) { jj_consume_token(15); words = Word_List(); @@ -522,19 +522,31 @@ final public Token dr() throws ParseException { Token t; if (jj_2_37(2)) { - t = jj_consume_token(WORD); + t = jj_consume_token(A); } else if (jj_2_38(2)) { - t = jj_consume_token(DR); - } else if (jj_2_39(2)) { - t = jj_consume_token(QUOTED_STRING); + t = jj_consume_token(C); } else { jj_consume_token(-1); throw new ParseException(); } - {if (true) return t;} + {if (true) return t;} throw new Error("Missing return statement in function"); } + final public Token word() throws ParseException { + Token t; + if (jj_2_39(2)) { + t = jj_consume_token(A); + } else if (jj_2_40(2)) { + t = jj_consume_token(B); + } else { + jj_consume_token(-1); + throw new ParseException(); + } + {if (true) return t;} + throw new Error("Missing return statement in function"); + } + private boolean jj_2_1(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return !jj_3_1(); } @@ -808,6 +820,13 @@ finally { jj_save(38, xla); } } + private boolean jj_2_40(int xla) { + jj_la = xla; jj_lastpos = jj_scanpos = token; + try { return !jj_3_40(); } + catch(LookaheadSuccess ls) { return true; } + finally { jj_save(39, xla); } + } + private boolean jj_3_12() { if (jj_scan_token(2)) return true; if (jj_3R_1()) return true; @@ -815,12 
+834,12 @@ } private boolean jj_3_16() { - if (jj_3R_7()) return true; + if (jj_3R_8()) return true; return false; } - private boolean jj_3R_12() { - if (jj_scan_token(WORD)) return true; + private boolean jj_3R_13() { + if (jj_3R_7()) return true; Token xsp; xsp = jj_scanpos; if (jj_3_36()) jj_scanpos = xsp; @@ -834,7 +853,7 @@ } private boolean jj_3_32() { - if (jj_3R_10()) return true; + if (jj_3R_11()) return true; return false; } @@ -848,13 +867,8 @@ return false; } - private boolean jj_3_39() { - if (jj_scan_token(QUOTED_STRING)) return true; - return false; - } - private boolean jj_3R_1() { - if (jj_3R_10()) return true; + if (jj_3R_11()) return true; return false; } @@ -865,38 +879,58 @@ private boolean jj_3_31() { if (jj_scan_token(13)) return true; - if (jj_3R_10()) return true; + if (jj_3R_11()) return true; return false; } - private boolean jj_3R_10() { + private boolean jj_3R_11() { if (jj_scan_token(LABEL)) return true; if (jj_scan_token(11)) return true; return false; } - private boolean jj_3_38() { - if (jj_scan_token(DR)) return true; + private boolean jj_3_40() { + if (jj_scan_token(B)) return true; return false; } - private boolean jj_3R_15() { - if (jj_scan_token(WORD)) return true; + private boolean jj_3R_16() { + if (jj_3R_7()) return true; if (jj_scan_token(14)) return true; return false; } + private boolean jj_3_38() { + if (jj_scan_token(C)) return true; + return false; + } + private boolean jj_3_23() { if (jj_scan_token(MOST)) return true; return false; } + private boolean jj_3_39() { + if (jj_scan_token(A)) return true; + return false; + } + private boolean jj_3_30() { - if (jj_3R_9()) return true; + if (jj_3R_10()) return true; if (jj_scan_token(6)) return true; return false; } + private boolean jj_3R_7() { + Token xsp; + xsp = jj_scanpos; + if (jj_3_39()) { + jj_scanpos = xsp; + if (jj_3_40()) return true; + } + return false; + } + private boolean jj_3_34() { if (jj_scan_token(2)) return true; if (jj_3R_4()) return true; @@ -904,29 
+938,26 @@ } private boolean jj_3_37() { - if (jj_scan_token(WORD)) return true; + if (jj_scan_token(A)) return true; return false; } - private boolean jj_3R_9() { + private boolean jj_3R_10() { Token xsp; xsp = jj_scanpos; if (jj_3_37()) { jj_scanpos = xsp; - if (jj_3_38()) { - jj_scanpos = xsp; - if (jj_3_39()) return true; + if (jj_3_38()) return true; } - } return false; } private boolean jj_3R_3() { - if (jj_3R_14()) return true; + if (jj_3R_15()) return true; return false; } - private boolean jj_3R_16() { + private boolean jj_3R_17() { Token xsp; xsp = jj_scanpos; if (jj_3_29()) { @@ -943,19 +974,19 @@ } private boolean jj_3_29() { - if (jj_scan_token(WORD)) return true; + if (jj_3R_7()) return true; if (jj_scan_token(10)) return true; return false; } private boolean jj_3_19() { if (jj_scan_token(2)) return true; - if (jj_3R_8()) return true; + if (jj_3R_9()) return true; return false; } private boolean jj_3_15() { - if (jj_scan_token(WORD)) return true; + if (jj_3R_7()) return true; return false; } @@ -966,7 +997,7 @@ } private boolean jj_3R_4() { - if (jj_3R_15()) return true; + if (jj_3R_16()) return true; return false; } @@ -1009,7 +1040,7 @@ private boolean jj_3_33() { if (jj_scan_token(2)) return true; - if (jj_3R_11()) return true; + if (jj_3R_12()) return true; return false; } @@ -1031,8 +1062,8 @@ return false; } - private boolean jj_3R_8() { - if (jj_3R_16()) return true; + private boolean jj_3R_9() { + if (jj_3R_17()) return true; return false; } @@ -1051,7 +1082,7 @@ return false; } - private boolean jj_3R_13() { + private boolean jj_3R_14() { if (jj_scan_token(10)) return true; if (jj_scan_token(LABEL)) return true; return false; @@ -1063,13 +1094,13 @@ return false; } - private boolean jj_3R_11() { - if (jj_3R_9()) return true; + private boolean jj_3R_12() { + if (jj_3R_10()) return true; return false; } private boolean jj_3_17() { - if (jj_3R_8()) return true; + if (jj_3R_9()) return true; return false; } @@ -1091,19 +1122,19 @@ } private 
boolean jj_3_35() { - if (jj_3R_12()) return true; + if (jj_3R_13()) return true; return false; } private boolean jj_3_36() { if (jj_scan_token(15)) return true; - if (jj_3R_12()) return true; + if (jj_3R_13()) return true; return false; } private boolean jj_3_18() { if (jj_scan_token(2)) return true; - if (jj_3R_7()) return true; + if (jj_3R_8()) return true; return false; } @@ -1112,7 +1143,7 @@ return false; } - private boolean jj_3R_14() { + private boolean jj_3R_15() { Token xsp; xsp = jj_scanpos; if (jj_3_5()) { @@ -1137,12 +1168,12 @@ } private boolean jj_3R_2() { - if (jj_3R_13()) return true; + if (jj_3R_14()) return true; return false; } - private boolean jj_3R_7() { - if (jj_3R_9()) return true; + private boolean jj_3R_8() { + if (jj_3R_10()) return true; Token xsp; xsp = jj_scanpos; if (jj_3_18()) jj_scanpos = xsp; @@ -1173,7 +1204,7 @@ private static void jj_la1_init_1() { jj_la1_1 = new int[] {}; } - final private JJCalls[] jj_2_rtns = new JJCalls[39]; + final private JJCalls[] jj_2_rtns = new JJCalls[40]; private boolean jj_rescan = false; private int jj_gc = 0; @@ -1357,7 +1388,7 @@ /** Generate ParseException. 
*/ public ParseException generateParseException() { jj_expentries.clear(); - boolean[] la1tokens = new boolean[33]; + boolean[] la1tokens = new boolean[34]; if (jj_kind >= 0) { la1tokens[jj_kind] = true; jj_kind = -1; @@ -1374,7 +1405,7 @@ } } } - for (int i = 0; i < 33; i++) { + for (int i = 0; i < 34; i++) { if (la1tokens[i]) { jj_expentry = new int[1]; jj_expentry[0] = i; @@ -1401,7 +1432,7 @@ private void jj_rescan_token() { jj_rescan = true; - for (int i = 0; i < 39; i++) { + for (int i = 0; i < 40; i++) { try { JJCalls p = jj_2_rtns[i]; do { @@ -1447,6 +1478,7 @@ case 36: jj_3_37(); break; case 37: jj_3_38(); break; case 38: jj_3_39(); break; + case 39: jj_3_40(); break; } } p = p.next; Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj 2012-05-09 15:28:25 UTC (rev 3698) @@ -206,7 +206,7 @@ Type type; } { - "(" label=<LABEL> "," referent = dr() "," word=<WORD> "," type = Type() ")" + "(" label=<LABEL> "," referent = dr() "," word=word() "," type = Type() ")" { Argument argument = new Argument(); argument.setLabel(new Label(label.toString())); @@ -234,7 +234,7 @@ | - word = <WORD> + word = word() { ElementaryType type=null; if (word.toString().equals("e")) @@ -330,7 +330,7 @@ } { - predicate=<WORD> "(" dr_list=DR_List() ")" + predicate=word() "(" dr_list=DR_List() ")" { Simple_DRS_Condition condition; @@ -443,7 +443,7 @@ List<String> words = null; } { - ref = <WORD> "/" type = <WORD> "/" (words = Word_List())? + ref = word() "/" type = word() "/" (words = Word_List())? { if (words == null) { @@ -465,7 +465,7 @@ List<String> words = null; } { - word = <WORD> ("^" words = Word_List())? 
+ word = word() ("^" words = Word_List())? { if (words == null) { @@ -498,14 +498,20 @@ TOKEN: {<LABEL: "l"(["0"-"9"])+>} -TOKEN: {<WORD: (["a"-"z","A"-"Z","_",".","#",":","0"-"9"])+>} +//TOKEN: {<WORD: (["a"-"z","A"-"Z","_",".","#",":","0"-"9"])+>} +//TOKEN: {<DR: (["?","!"])?(["a"-"z","A"-"Z","0"-"9","."])+>} -TOKEN: {<DR: (["?","!"])?(["a"-"z","A"-"Z","0"-"9","."])+>} +TOKEN: {<A: (["a"-"z","A"-"Z","0"-"9"])+>} +TOKEN: {<B: (["a"-"z","A"-"Z","_",".","#","0"-"9"])+":"(["a"-"z","A"-"Z","_",".","#","0"-"9"])+>} +TOKEN: {<C: ["?","!"](["a"-"z","A"-"Z","0"-"9"])+>} +Token dr() : { Token t; }{ (t=<A> | t=<C>) { return t; } } +Token word() : { Token t; }{ (t=<A> | t=<B>) { return t; } } + +// Token label() : { Token t; }{ (t=<A> | t=<B> | t=<LABEL>) { return t; } } + TOKEN: {<QUOTED_STRING: "\'" (~["\'"])+ "\'" >} -Token dr() : { Token t; }{ (t=<WORD> | t=<DR> | t=<QUOTED_STRING>) { return t; } } - SKIP : { " " | "\t" | "\n" | "\r" } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserConstants.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserConstants.java 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserConstants.java 2012-05-09 15:28:25 UTC (rev 3698) @@ -31,11 +31,13 @@ /** RegularExpression Id. */ int LABEL = 25; /** RegularExpression Id. */ - int WORD = 26; + int A = 26; /** RegularExpression Id. */ - int DR = 27; + int B = 27; /** RegularExpression Id. */ - int QUOTED_STRING = 28; + int C = 28; + /** RegularExpression Id. */ + int QUOTED_STRING = 29; /** Lexical state. 
*/ int DEFAULT = 0; @@ -68,8 +70,9 @@ "\"NO\"", "\"HOWMANY\"", "<LABEL>", - "<WORD>", - "<DR>", + "<A>", + "<B>", + "<C>", "<QUOTED_STRING>", "\" \"", "\"\\t\"", Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserTokenManager.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserTokenManager.java 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_ParserTokenManager.java 2012-05-09 15:28:25 UTC (rev 3698) @@ -23,18 +23,15 @@ switch (pos) { case 0: - if ((active0 & 0x800L) != 0L) - { - jjmatchedKind = 26; - return 2; - } if ((active0 & 0x1ff2280L) != 0L) { jjmatchedKind = 26; - return 8; + return 4; } return -1; case 1: + if ((active0 & 0x802000L) != 0L) + return 4; if ((active0 & 0x17f0280L) != 0L) { if (jjmatchedPos != 1) @@ -42,58 +39,56 @@ jjmatchedKind = 26; jjmatchedPos = 1; } - return 8; + return 4; } - if ((active0 & 0x802000L) != 0L) - return 8; return -1; case 2: if ((active0 & 0x17f0280L) != 0L) { jjmatchedKind = 26; jjmatchedPos = 2; - return 8; + return 4; } if ((active0 & 0x2000L) != 0L) - return 8; + return 4; return -1; case 3: if ((active0 & 0x1190200L) != 0L) { jjmatchedKind = 26; jjmatchedPos = 3; - return 8; + return 4; } if ((active0 & 0x660000L) != 0L) - return 8; + return 4; return -1; case 4: + if ((active0 & 0x10000L) != 0L) + return 4; if ((active0 & 0x1180200L) != 0L) { jjmatchedKind = 26; jjmatchedPos = 4; - return 8; + return 4; } - if ((active0 & 0x10000L) != 0L) - return 8; return -1; case 5: if ((active0 & 0x1180000L) != 0L) { jjmatchedKind = 26; jjmatchedPos = 5; - return 8; + return 4; } return -1; case 6: + if ((active0 & 0x1080000L) != 0L) + return 4; if ((active0 & 0x100000L) != 0L) { jjmatchedKind = 26; jjmatchedPos = 6; - return 8; + return 4; } - if ((active0 & 0x1080000L) != 0L) - return 8; 
return -1; default : return -1; @@ -217,7 +212,7 @@ return jjMoveStringLiteralDfa3_0(active0, 0x20000L); case 84: if ((active0 & 0x2000L) != 0L) - return jjStartNfaWithStates_0(2, 13, 8); + return jjStartNfaWithStates_0(2, 13, 4); break; case 87: return jjMoveStringLiteralDfa3_0(active0, 0x1000000L); @@ -247,7 +242,7 @@ break; case 69: if ((active0 & 0x40000L) != 0L) - return jjStartNfaWithStates_0(3, 18, 8); + return jjStartNfaWithStates_0(3, 18, 4); break; case 76: return jjMoveStringLiteralDfa4_0(active0, 0x100000L); @@ -257,15 +252,15 @@ return jjMoveStringLiteralDfa4_0(active0, 0x10000L); case 84: if ((active0 & 0x20000L) != 0L) - return jjStartNfaWithStates_0(3, 17, 8); + return jjStartNfaWithStates_0(3, 17, 4); break; case 87: if ((active0 & 0x200000L) != 0L) - return jjStartNfaWithStates_0(3, 21, 8); + return jjStartNfaWithStates_0(3, 21, 4); break; case 89: if ((active0 & 0x400000L) != 0L) - return jjStartNfaWithStates_0(3, 22, 8); + return jjStartNfaWithStates_0(3, 22, 4); break; case 112: return jjMoveStringLiteralDfa4_0(active0, 0x200L); @@ -293,7 +288,7 @@ return jjMoveStringLiteralDfa5_0(active0, 0x80000L); case 89: if ((active0 & 0x10000L) != 0L) - return jjStartNfaWithStates_0(4, 16, 8); + return jjStartNfaWithStates_0(4, 16, 4); break; case 101: return jjMoveStringLiteralDfa5_0(active0, 0x200L); @@ -343,11 +338,11 @@ return jjMoveStringLiteralDfa7_0(active0, 0x100000L); case 84: if ((active0 & 0x80000L) != 0L) - return jjStartNfaWithStates_0(6, 19, 8); + return jjStartNfaWithStates_0(6, 19, 4); break; case 89: if ((active0 & 0x1000000L) != 0L) - return jjStartNfaWithStates_0(6, 24, 8); + return jjStartNfaWithStates_0(6, 24, 4); break; default : break; @@ -367,7 +362,7 @@ { case 84: if ((active0 & 0x100000L) != 0L) - return jjStartNfaWithStates_0(7, 20, 8); + return jjStartNfaWithStates_0(7, 20, 4); break; default : break; @@ -388,7 +383,7 @@ private int jjMoveNfa_0(int startState, int curPos) { int startsAt = 0; - jjnewStateCnt = 8; + jjnewStateCnt 
= 11; int i = 1; jjstateSet[0] = startState; int kind = 0x7fffffff; @@ -403,33 +398,27 @@ { switch(jjstateSet[--i]) { - case 0: - if ((0x3ff400000000000L & l) != 0L) + case 4: + if ((0x3ff400800000000L & l) != 0L) + jjCheckNAddTwoStates(3, 4); + else if (curChar == 58) + jjCheckNAdd(5); + if ((0x3ff000000000000L & l) != 0L) { - if (kind > 27) - kind = 27; - jjCheckNAdd(4); - } - else if (curChar == 39) - jjCheckNAdd(6); - else if ((0x8000000200000000L & l) != 0L) - jjCheckNAdd(4); - if ((0x400400800000000L & l) != 0L) - { if (kind > 26) kind = 26; jjCheckNAdd(2); } break; - case 8: - if ((0x3ff400000000000L & l) != 0L) + case 0: + if ((0x3ff400800000000L & l) != 0L) + jjCheckNAddTwoStates(3, 4); + else if (curChar == 39) + jjCheckNAdd(9); + else if ((0x8000000200000000L & l) != 0L) + jjCheckNAdd(7); + if ((0x3ff000000000000L & l) != 0L) { - if (kind > 27) - kind = 27; - jjCheckNAdd(4); - } - if ((0x400400800000000L & l) != 0L) - { if (kind > 26) kind = 26; jjCheckNAdd(2); @@ -443,35 +432,46 @@ jjstateSet[jjnewStateCnt++] = 1; break; case 2: - if ((0x400400800000000L & l) == 0L) + if ((0x3ff000000000000L & l) == 0L) break; if (kind > 26) kind = 26; jjCheckNAdd(2); break; case 3: - if ((0x8000000200000000L & l) != 0L) - jjCheckNAdd(4); + if ((0x3ff400800000000L & l) != 0L) + jjCheckNAddTwoStates(3, 4); break; - case 4: - if ((0x3ff400000000000L & l) == 0L) + case 5: + if ((0x3ff400800000000L & l) == 0L) break; if (kind > 27) kind = 27; - jjCheckNAdd(4); + jjCheckNAdd(5); break; - case 5: - if (curChar == 39) - jjCheckNAdd(6); - break; case 6: - if ((0xffffff7fffffffffL & l) != 0L) - jjCheckNAddTwoStates(6, 7); + if ((0x8000000200000000L & l) != 0L) + jjCheckNAdd(7); break; case 7: - if (curChar == 39 && kind > 28) + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 28) kind = 28; + jjCheckNAdd(7); break; + case 8: + if (curChar == 39) + jjCheckNAdd(9); + break; + case 9: + if ((0xffffff7fffffffffL & l) != 0L) + jjCheckNAddTwoStates(9, 10); + break; + case 10: 
+ if (curChar == 39 && kind > 29) + kind = 29; + break; default : break; } } while(i != startsAt); @@ -483,51 +483,54 @@ { switch(jjstateSet[--i]) { - case 0: + case 4: if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAddTwoStates(3, 4); + if ((0x7fffffe07fffffeL & l) != 0L) { if (kind > 26) kind = 26; jjCheckNAdd(2); } - if ((0x7fffffe07fffffeL & l) != 0L) - { - if (kind > 27) - kind = 27; - jjCheckNAdd(4); - } - if (curChar == 108) - jjstateSet[jjnewStateCnt++] = 1; break; - case 8: + case 0: if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAddTwoStates(3, 4); + if ((0x7fffffe07fffffeL & l) != 0L) { if (kind > 26) kind = 26; jjCheckNAdd(2); } - if ((0x7fffffe07fffffeL & l) != 0L) - { - if (kind > 27) - kind = 27; - jjCheckNAdd(4); - } + if (curChar == 108) + jjstateSet[jjnewStateCnt++] = 1; break; case 2: - if ((0x7fffffe87fffffeL & l) == 0L) + if ((0x7fffffe07fffffeL & l) == 0L) break; if (kind > 26) kind = 26; jjCheckNAdd(2); break; - case 4: - if ((0x7fffffe07fffffeL & l) == 0L) + case 3: + if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAddTwoStates(3, 4); + break; + case 5: + if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 27) kind = 27; - jjCheckNAdd(4); + jjstateSet[jjnewStateCnt++] = 5; break; - case 6: + case 7: + if ((0x7fffffe07fffffeL & l) == 0L) + break; + if (kind > 28) + kind = 28; + jjstateSet[jjnewStateCnt++] = 7; + break; + case 9: jjAddStates(0, 1); break; default : break; @@ -542,7 +545,7 @@ { switch(jjstateSet[--i]) { - case 6: + case 9: if ((jjbitVec0[i2] & l2) != 0L) jjAddStates(0, 1); break; @@ -557,14 +560,14 @@ kind = 0x7fffffff; } ++curPos; - if ((i = jjnewStateCnt) == (startsAt = 8 - (jjnewStateCnt = startsAt))) + if ((i = jjnewStateCnt) == (startsAt = 11 - (jjnewStateCnt = startsAt))) return curPos; try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { return curPos; } } } static final int[] jjnextStates = { - 6, 7, + 9, 10, }; /** Token literal values. 
*/ @@ -573,21 +576,21 @@ "\163\143\157\160\145\50", "\50", "\72\133", "\174", "\116\117\124", "\57", "\136", "\105\126\105\122\131", "\115\117\123\124", "\123\117\115\105", "\124\110\105\115\117\123\124", "\124\110\105\114\105\101\123\124", "\101\106\105\127", "\115\101\116\131", "\116\117", -"\110\117\127\115\101\116\131", null, null, null, null, null, null, null, null, }; +"\110\117\127\115\101\116\131", null, null, null, null, null, null, null, null, null, }; /** Lexer state names. */ public static final String[] lexStateNames = { "DEFAULT", }; static final long[] jjtoToken = { - 0x1fffffffL, + 0x3fffffffL, }; static final long[] jjtoSkip = { - 0x1e0000000L, + 0x3c0000000L, }; protected SimpleCharStream input_stream; -private final int[] jjrounds = new int[8]; -private final int[] jjstateSet = new int[16]; +private final int[] jjrounds = new int[11]; +private final int[] jjstateSet = new int[22]; protected char curChar; /** Constructor. */ public DUDE_ParserTokenManager(SimpleCharStream stream){ @@ -614,7 +617,7 @@ { int i; jjround = 0x80000001; - for (i = 8; i-- > 0;) + for (i = 11; i-- > 0;) jjrounds[i] = 0x80000000; } Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java 2012-05-09 15:28:25 UTC (rev 3698) @@ -35,6 +35,14 @@ String type = "UNSPEC"; String slot; + /* PRONOUN HACK */ + if (pos.equals("PRP") || pos.equals("PRP$")) { + String[] pronEntry = {token, + "(DET DET:'" + token.toLowerCase() + "')", + "<x,l1,e,[ l1:[ x | ] ],[],[],[]>"}; + result.add(pronEntry); + } + /* NOUNS */ if (equalsOneOf(pos,noun)) { @@ -329,12 +337,12 @@ slot = "SLOT_" + token + "/PROPERTY/"; String[] npAdjunct = {token, "(NP NP* 
(PP P:'" + token.toLowerCase() + "' DP[pobj]))", - // "<x,l1,<e,t>,[ l1:[ | SLOT_" + token + "(p), p(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + - "<x,l1,<e,t>,[ l1:[ | empty(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],[]>"}; + "<x,l1,<e,t>,[ l1:[ | SLOT_" + token + "(p), p(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + + " ;; <x,l1,<e,t>,[ l1:[ | empty(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],[]>"}; String[] vpAdjunct = {token, "(VP VP* (PP P:'" + token.toLowerCase() + "' DP[pobj]))", - // "<x,l1,t,[ l1:[ | SLOT_" + token + "(p), p(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + - "<x,l1,t,[ l1:[ | empty(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],[]>"}; + "<x,l1,t,[ l1:[ | SLOT_" + token + "(p), p(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],["+slot+"]>" + + " ;; <x,l1,t,[ l1:[ | empty(x,y) ] ],[(l2,y,pobj,<<e,t>,t>)],[l2=l1],[]>"}; result.add(npAdjunct); result.add(vpAdjunct); } Added: trunk/components-ext/src/main/javacc/DRSParser.jj =================================================================== --- trunk/components-ext/src/main/javacc/DRSParser.jj (rev 0) +++ trunk/components-ext/src/main/javacc/DRSParser.jj 2012-05-09 15:28:25 UTC (rev 3698) @@ -0,0 +1,259 @@ + +options { + LOOKAHEAD = 2; + CHOICE_AMBIGUITY_CHECK = 2; + OTHER_AMBIGUITY_CHECK = 1; + STATIC = false; + DEBUG_PARSER = false; + DEBUG_LOOKAHEAD = false; + DEBUG_TOKEN_MANAGER = false; + ERROR_REPORTING = true; + JAVA_UNICODE_ESCAPE = false; + UNICODE_INPUT = false; + IGNORE_CASE = false; + USER_TOKEN_MANAGER = false; + USER_CHAR_STREAM = false; + BUILD_PARSER = true; + BUILD_TOKEN_MANAGER = true; + SANITY_CHECK = true; + FORCE_LA_CHECK = false; +} + +PARSER_BEGIN(DRSParser) + +package org.dllearner.algorithm.tbsl.sem.drs.reader; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.dllearner.algorithm.tbsl.sem.drs.*; +import org.dllearner.algorithm.tbsl.sem.util.Label; + +public class DRSParser { + + /** Main 
entry point. */ + public static void main(String args[]) throws ParseException { + DRSParser parser = new DRSParser(System.in); + parser.Input(); + } + +} + +PARSER_END(DRSParser) + +/** Root production. */ +void Input() : +{} +{ + DRS() <EOF> +} + +/** DRS */ +DRS DRS() : +{ + Set<DiscourseReferent> dr_set = null; + Set<DRS_Condition> conditions = null; + DRS drs; + Token label = null; + +} +{ + (label=<LABEL> ":")? "[" (dr_set=DR_Set())? "|" (conditions=Condition_List())? "]" + { + if (dr_set == null) + { + dr_set = new HashSet<DiscourseReferent>(); + } + drs = new DRS(); + if (label != null) + { + drs.setLabel(label.toString()); + } + drs.setDiscourseReferents(dr_set); + if (conditions != null) + { + drs.setDRSConditions(conditions); + } + return drs; + } +} + +/** DR_Set*/ +Set<DiscourseReferent> DR_Set() : +{ + Token dr; + Set<DiscourseReferent> dr_set=null; +} +{ + dr = dr() ("," dr_set=DR_Set())? + { + if (dr_set == null) + { + dr_set= new HashSet<DiscourseReferent>(); + } + if (dr.toString().startsWith("?")) + { + dr_set.add(new DiscourseReferent(dr.toString().substring(1),true,false)); + } + else if (dr.toString().startsWith("!")) + { + dr_set.add(new DiscourseReferent(dr.toString().substring(1),false,true)); + } + else + { + dr_set.add(new DiscourseReferent(dr.toString(),false,false)); + } + return dr_set; + } +} + +Set<DRS_Condition> Condition_List() : +{ + DRS_Condition condition= null; + Set<DRS_Condition> conditions = null; +} +{ + condition=Condition() ("," conditions=Condition_List())? 
+ { + if (conditions == null) + { + conditions = new HashSet<DRS_Condition>(); + } + conditions.add(condition); + return conditions; + } +} + +DRS_Condition Condition() : +{ + List<DiscourseReferent> dr_list; + Token dr1; + Token dr2; + Token dr; + Token predicate; + Token quantifier; + DRS drs1; + DRS drs2; +} +{ + + predicate=<WORD> "(" dr_list=DR_List() ")" + { + Simple_DRS_Condition condition; + + condition = new Simple_DRS_Condition(); + condition.setPredicate(predicate.toString()); + condition.setArguments(dr_list); + return condition; + } + + | + + dr1 = dr() "=" dr2 = dr() + { + Simple_DRS_Condition condition; + + condition = new Simple_DRS_Condition(); + condition.setPredicate("equal"); + condition.addArgument(new DiscourseReferent(dr1.toString())); + condition.addArgument(new DiscourseReferent(dr2.toString())); + return condition; + } + + | + + "NOT" drs1=DRS() + { + Negated_DRS drs = new Negated_DRS(); + drs.setDRS(drs1); + return drs; + } + + | + + drs1=DRS() (quantifier=<EVERY> | quantifier=<SOME> | quantifier=<AFEW> | quantifier=<MOST> | quantifier=<THEMOST> | quantifier=<THELEAST> | + quantifier=<HOWMANY> | quantifier=<MANY> | quantifier=<NO>) dr=dr() drs2=DRS() + { + Complex_DRS_Condition drs; + drs = new Complex_DRS_Condition(); + drs.setRestrictor(drs1); + drs.setScope(drs2); + drs.setReferent(new DiscourseReferent(dr.toString())); + + if (quantifier.toString().equals("EVERY")) {drs.setQuantifier(DRS_Quantifier.EVERY);} + if (quantifier.toString().equals("SOME")) {drs.setQuantifier(DRS_Quantifier.SOME);} + if (quantifier.toString().equals("MOST")) {drs.setQuantifier(DRS_Quantifier.MOST);} + if (quantifier.toString().equals("THEMOST")) {drs.setQuantifier(DRS_Quantifier.THEMOST);} + if (quantifier.toString().equals("THELEAST")) {drs.setQuantifier(DRS_Quantifier.THELEAST);} + if (quantifier.toString().equals("AFEW")) {drs.setQuantifier(DRS_Quantifier.FEW);} + if (quantifier.toString().equals("MANY")) {drs.setQuantifier(DRS_Quantifier.MANY);} + if 
(quantifier.toString().equals("HOWMANY")) {drs.setQuantifier(DRS_Quantifier.HOWMANY);} + if (quantifier.toString().equals("NO")) {drs.setQuantifier(DRS_Quantifier.NO);} + + return drs; + + } +} + +/** DR_List*/ +List<DiscourseReferent> DR_List() : +{ + Token dr; + List<DiscourseReferent> dr_list=null; +} +{ + dr = dr() ("," dr_list=DR_List())? + { + if (dr_list == null) + { + dr_list= new ArrayList<DiscourseReferent>(); + } + + if (dr.toString().startsWith("?")) { + dr_list.add(0,new DiscourseReferent(dr.toString().substring(1),true,false)); + } + else if (dr.toString().startsWith("?")) { + dr_list.add(0,new DiscourseReferent(dr.toString().substring(1),false,true)); + } + else { + dr_list.add(0,new DiscourseReferent(dr.toString(),false,false)); + } + + return dr_list; + } +} + + +TOKEN: {<EVERY: "EVERY">} + +TOKEN: {<MOST: "MOST">} + +TOKEN: {<SOME: "SOME">} + +TOKEN: {<THEMOST: "THEMOST">} + +TOKEN: {<THELEAST: "THELEAST">} + +TOKEN: {<AFEW: "AFEW">} + +TOKEN: {<MANY: "MANY">} + +TOKEN: {<NO: "NO">} + +TOKEN: {<HOWMANY: "HOWMANY">} + +TOKEN: {<LABEL: "l"(["0"-"9"])+>} + +TOKEN: {<WORD: (["a"-"z","A"-"Z","_",".","#",":"])+>} + +TOKEN: {<DR: (["?","!"])?(["a"-"z","A"-"Z","0"-"9","."])+>} + +TOKEN: {<QUOTED_STRING: "\'" (~["\'"])+ "\'" >} + +Token dr() : { Token t; }{ (t=<WORD> | t=<DR> | t=<QUOTED_STRING>) { return t; } } + +SKIP : { " " | "\t" | "\n" | "\r" } + + Property changes on: trunk/components-ext/src/main/javacc/DRSParser.jj ___________________________________________________________________ Added: svn:executable + * Copied: trunk/components-ext/src/main/javacc/DUDE_Parser.jj (from rev 3694, trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj) =================================================================== --- trunk/components-ext/src/main/javacc/DUDE_Parser.jj (rev 0) +++ trunk/components-ext/src/main/javacc/DUDE_Parser.jj 2012-05-09 15:28:25 UTC (rev 3698) @@ -0,0 +1,513 @@ + +options { + LOOKAHEAD = 2; + 
CHOICE_AMBIGUITY_CHECK = 2; + OTHER_AMBIGUITY_CHECK = 1; + STATIC = false; + DEBUG_PARSER = false; + DEBUG_LOOKAHEAD = false; + DEBUG_TOKEN_MANAGER = false; + ERROR_REPORTING = true; + JAVA_UNICODE_ESCAPE = false; + UNICODE_INPUT = false; + IGNORE_CASE = false; + USER_TOKEN_MANAGER = false; + USER_CHAR_STREAM = false; + BUILD_PARSER = true; + BUILD_TOKEN_MANAGER = true; + SANITY_CHECK = true; + FORCE_LA_CHECK = false; +} + +PARSER_BEGIN(DUDE_Parser) + +package org.dllearner.algorithm.tbsl.sem.dudes.reader; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.dllearner.algorithm.tbsl.sem.drs.*; +import org.dllearner.algorithm.tbsl.sem.dudes.data.*; +import org.dllearner.algorithm.tbsl.sem.util.*; +import org.dllearner.algorithm.tbsl.sparql.Slot; +import org.dllearner.algorithm.tbsl.sparql.SlotType; + +public class DUDE_Parser { + + /** Main entry point. */ + public static void main(String args[]) throws ParseException { + DUDE_Parser parser = new DUDE_Parser(System.in); + parser.Input(); + } + +} + +PARSER_END(DUDE_Parser) + +/** Root production. */ +void Input() : +{} +{ + DUDE() <EOF> +} + +/** DUDE */ +Dude DUDE() : +{ + Token referent; + Token label; + Type type; + List<DRS> drs_list = null; + List<DominanceConstraint> constraints = null; + List<Argument> arg_list = null; + List<Slot> slots = null; +} +{ + "<" referent = dr() "," label=<LABEL> "," type=Type() "," "[" (drs_list=DRS_List())? "]" "," "[" (arg_list = Arg_List())? "]" + "," "[" (constraints = DC_List())? "]" "," "[" (slots = Slot_List())? 
"]" ">" + { + Dude dude = new Dude(); + dude.setReferent(referent.toString()); + dude.setType(type); + if (drs_list != null) dude.setComponents(drs_list); + dude.setLabel(new Label(label.toString())); + if (arg_list != null) dude.setArguments(arg_list); + if (constraints != null) dude.setDominanceConstraints(constraints); + if (slots != null) dude.setSlots(slots); + return dude; + } +} + +DominanceConstraint DominanceConstraint() : +{ + Label label1; + Label label2; + Token domType; + DominanceConstraint dc = null; +} +{ + label1 = Label() "<" label2 = Label() + { + dc = new DominanceConstraint(label1,label2); + return dc; + } + + | + + label1 = Label() ">" label2 = Label() + { + dc = new DominanceConstraint(label2,label1); + return dc; + } + + | + + label1 = Label() "=" label2 = Label() + { + dc = new DominanceConstraint(label1,label2); + dc.setType(DomType.equal); + return dc; + } +} + + +Label Label() : +{ + Token label; +} +{ + + label = <LABEL> + { + return new Label(label.toString()); + } + + | + + "res(" label = <LABEL> ")" + { + return new Label(label.toString(),Position.res); + } + + | + + "scope(" label = <LABEL> ")" + { + return new Label(label.toString(),Position.scope); + } +} + + +List<DominanceConstraint> DC_List() : +{ +List<DominanceConstraint> dc_list = null; +DominanceConstraint dc = null; +} +{ + dc = DominanceConstraint() ("," dc_list = DC_List())? + { + if (dc_list == null) + { + dc_list = new ArrayList<DominanceConstraint>(); + } + + dc_list.add(0,dc); + return dc_list; + } + + +} + + +List<DRS> DRS_List() : +{ + DRS drs; + List<DRS> drs_list = null; +} +{ + drs = DRS() ("," drs_list = DRS_List())? + { + if (drs_list == null) + { + drs_list = new ArrayList<DRS>(); + } + + drs_list.add(0,drs); + return drs_list; + } +} + +List<Argument> Arg_List() : +{ + Argument argument; + List<Argument> arg_list = null; +} +{ + argument = Argument() ("," arg_list = Arg_List())? 
+ { + if (arg_list == null) + { + arg_list = new ArrayList<Argument>(); + } + + arg_list.add(0,argument); + return arg_list; + } + +} + +Argument Argument() : +{ + Token label; + Token word; + Token referent; + Type type; +} +{ + "(" label=<LABEL> "," referent = dr() "," word=word() "," type = Type() ")" + { + Argument argument = new Argument(); + argument.setLabel(new Label(label.toString())); + argument.setReferent(referent.toString()); + argument.setAnchor(word.toString()); + argument.setType(type); + return argument; + } +} + +Type Type() : +{ + Type argument; + Type result; + Token word; +} +{ + "<" argument = Type() "," result = Type() ">" + { + CompositeType type = new CompositeType(); + type.setArgumentType(argument); + type.setResultType(result); + return type; + } + + | + + word = word() + { + ElementaryType type=null; + if (word.toString().equals("e")) + type = new ElementaryType(ElemType.e); + if (word.toString().equals("t")) + type = new ElementaryType(ElemType.t); + return type; + } +} + + +/** DRS */ +DRS DRS() : +{ + Set<DiscourseReferent> dr_set = null; + Set<DRS_Condition> conditions = null; + DRS drs; + Token label; + +} +{ + label=<LABEL> ":[" (dr_set=DR_Set())? "|" (conditions=Condition_List())? "]" + { + if (dr_set == null) + { + dr_set = new HashSet<DiscourseReferent>(); + } + drs = new DRS(); + drs.setLabel(label.toString()); + drs.setDiscourseReferents(dr_set); + if (conditions != null) + drs.setDRSConditions(conditions); + return drs; + } +} + +/** DR_Set*/ +Set<DiscourseReferent> DR_Set() : +{ + Token dr; + Set<DiscourseReferent> dr_set=null; +} +{ + dr = dr() ("," dr_set=DR_Set())? 
+ { + if (dr_set == null) + { + dr_set= new HashSet<DiscourseReferent>(); + } + if (dr.toString().startsWith("?")) + { + dr_set.add(new DiscourseReferent(dr.toString().substring(1),true,false)); + } + else if (dr.toString().startsWith("!")) + { + dr_set.add(new DiscourseReferent(dr.toString().substring(1),false,true)); + } + else + { + dr_set.add(new DiscourseReferent(dr.toString(),false,false)); + } + return dr_set; + } +} + +Set<DRS_Condition> Condition_List() : +{ + DRS_Condition condition= null; + Set<DRS_Condition> conditions = null; +} +{ + condition=Condition() ("," conditions=Condition_List())? + { + if (conditions == null) + { + conditions = new HashSet<DRS_Condition>(); + } + conditions.add(condition); + return conditions; + } +} + +DRS_Condition Condition() : +{ + List<DiscourseReferent> dr_list; + Token dr1; + Token dr2; + Token dr; + Token predicate; + Token quantifier; + DRS drs1; + DRS drs2; +} +{ + + predicate=word() "(" dr_list=DR_List() ")" + { + Simple_DRS_Condition condition; + + condition = new Simple_DRS_Condition(); + condition.setPredicate(predicate.toString()); + condition.setArguments(dr_list); + return condition; + } + + | + + dr1 = dr() "=" dr2 = dr() + { + Simple_DRS_Condition condition; + + condition = new Simple_DRS_Condition(); + condition.setPredicate("equal"); + condition.addArgument(new DiscourseReferent(dr1.toString())); + condition.addArgument(new DiscourseReferent(dr2.toString())); + return condition; + } + + | + + "NOT" drs1=DRS() + { + Negated_DRS drs = new Negated_DRS(); + drs.setDRS(drs1); + return drs; + } + + | + + drs1=DRS() (quantifier=<EVERY> | quantifier=<SOME> | quantifier=<AFEW> | quantifier=<MOST> | quantifier=<THEMOST> | quantifier=<THELEAST> | + quantifier=<HOWMANY> | quantifier=<MANY> | quantifier=<NO>) dr=dr() drs2=DRS() + { + Complex_DRS_Condition drs; + drs = new Complex_DRS_Condition(); + drs.setRestrictor(drs1); + drs.setScope(drs2); + drs.setReferent(new DiscourseReferent(dr.toString())); + + if 
(quantifier.toString().equals("EVERY")) {drs.setQuantifier(DRS_Quantifier.EVERY);} + if (quantifier.toString().equals("SOME")) {drs.setQuantifier(DRS_Quantifier.SOME);} + if (quantifier.toString().equals("MOST")) {drs.setQuantifier(DRS_Quantifier.MOST);} + if (quantifier.toString().equals("THEMOST")) {drs.setQuantifier(DRS_Quantifier.THEMOST);} + if (quantifier.toString().equals("THELEAST")) {drs.setQuantifier(DRS_Quantifier.THELEAST);} + if (quantifier.toString().equals("AFEW")) {drs.setQuantifier(DRS_Quantifier.FEW);} + if (quantifier.toString().equals("MANY")) {drs.setQuantifier(DRS_Quantifier.MANY);} + if (quantifier.toString().equals("HOWMANY")) {drs.setQuantifier(DRS_Quantifier.HOWMANY);} + if (quantifier.toString().equals("NO")) {drs.setQuantifier(DRS_Quantifier.NO);} + + return drs; + + } +} + +/** DR_List*/ +List<DiscourseReferent> DR_List() : +{ + Token dr; + List<DiscourseReferent> dr_list=null; +} +{ + dr = dr() ("," dr_list=DR_List())? + { + if (dr_list == null) + { + dr_list= new ArrayList<DiscourseReferent>(); + } + + if (dr.toString().startsWith("?")) { + dr_list.add(0,new DiscourseReferent(dr.toString().substring(1),true,false)); + } + else if (dr.toString().startsWith("?")) { + dr_list.add(0,new DiscourseReferent(dr.toString().substring(1),false,true)); + } + else { + dr_list.add(0,new DiscourseReferent(dr.toString(),false,false)); + } + + return dr_list; + } +} + + +List<Slot> Slot_List() : +{ + Slot slot; + List<Slot> slots = null; +} +{ + slot = Slot() ("," slots = Slot_List())? + { + if (slots == null) + { + slots = new ArrayList<Slot>(); + } + + slots.add(slot); + return slots; + } +} + +Slot Slot() : +{ + Token ref; + Token type; + SlotType slottype = null; + List<String> words = null; +} +{ + ref = word() "/" type = word() "/" (words = Word_List())? 
+ { + if (words == null) + { + words = new ArrayList<String>(); + } + if (type.toString().equals("CLASS")) { slottype = SlotType.CLASS; } + else if (type.toString().equals("RESOURCE")) { slottype = SlotType.RESOURCE; } + else if (type.toString().equals("PROPERTY")) { slottype = SlotType.PROPERTY; } + else if (type.toString().equals("SYMPROPERTY")) { slottype = SlotType.SYMPROPERTY; } + else { slottype = SlotType.UNSPEC; } + + return new Slot(ref.toString(),slottype,words); + } +} + +List<String> Word_List() : +{ + Token word; + List<String> words = null; +} +{ + word = word() ("^" words = Word_List())? + { + if (words == null) + { + words = new ArrayList<String>(); + } + + words.add(0,word.toString()); + return words; + } +} + + +TOKEN: {<EVERY: "EVERY">} + +TOKEN: {<MOST: "MOST">} + +TOKEN: {<SOME: "SOME">} + +TOKEN: {<THEMOST: "THEMOST">} + +TOKEN: {<THELEAST: "THELEAST">} + +TOKEN: {<AFEW: "AFEW">} + +TOKEN: {<MANY: "MANY">} + +TOKEN: {<NO: "NO">} + +TOKEN: {<HOWMANY: "HOWMANY">} + +TOKEN: {<LABEL: "l"(["0"-"9"])+>} + +TOKEN: {<WORD: (["a"-"z","A"-"Z","_",".","#",":","0"-"9"])+>} + +TOKEN: {<DR: (["?","!"])?(["a"-"z","A"-"Z","0"-"9","."])+>} + +TOKEN: {<QUOTED_STRING: "\'" (~["\'"])+ "\'" >} + +Token dr() : { Token t; }{ (t=<WORD> | t=<DR> | t=<QUOTED_STRING>) { return t; } } +Token word() : { Token t; }{ (t=<WORD> | t=<DR> | t=<QUOTED_STRING>) { return t; } } + +SKIP : { " " | "\t" | "\n" | "\r" } + + + Added: trunk/components-ext/src/main/javacc/LTAG_Parser.jj =================================================================== --- trunk/components-ext/src/main/javacc/LTAG_Parser.jj (rev 0) +++ trunk/components-ext/src/main/javacc/LTAG_Parser.jj 2012-05-09 15:28:25 UTC (rev 3698) @@ -0,0 +1,212 @@ + +options { + LOOKAHEAD = 5; + CHOICE_AMBIGUITY_CHECK = 2; + OTHER_AMBIGUITY_CHECK = 1; + STATIC = false; + DEBUG_PARSER = false; + DEBUG_LOOKAHEAD = false; + DEBUG_TOKEN_MANAGER = false; + ERROR_REPORTING = true; + JAVA_UNICODE_ESCAPE = false; + UNICODE_INPUT = 
false; + IGNORE_CASE = false; + USER_TOKEN_MANAGER = false; + USER_CHAR_STREAM = false; + BUILD_PARSER = true; + BUILD_TOKEN_MANAGER = true; + SANITY_CHECK = true; + FORCE_LA_CHECK = false; +} + +PARSER_BEGIN(LTAGTreeParser) + +package org.dllearner.algorithm.tbsl.ltag.reader; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import org.dllearner.algorithm.tbsl.ltag.data.*; +import org.dllearner.algorithm.tbsl.ltag.agreement.*; + +public class LTAGTreeParser { + + /** Main entry point. */ + public static void main(String args[]) throws ParseException { + LTAGTreeParser parser = new LTAGTreeParser(System.in); + parser.Input(); + } + +} + +PARSER_END(LTAGTreeParser) + +/** Root production. */ +void Input() : +{} +{ + Tree() <EOF> +} + +/** Tree */ +TreeNode Tree() : +{ + Category category; + String terminal = ""; + List<TreeNode> treelist; + Token word; + Feature feature = null; +} + +{ + // SubstNode with case constraints (e.g. DP[subj]|nom) + category = Cat() "[" word=<WORD> "]" ("{" feature=Feat() "}")? + { + SubstNode substnode = new SubstNode(word.toString(),category,feature); + return substnode; + } + + | + + // FootNode (e.g. S*) + category = Cat() "*" + { + FootNode footnode = new FootNode(category); + return footnode; + } + + | + + // FootNode with no adjunction allowed (e.g. ^S*) + "^" category = Cat() "*" + { + FootNode footnode = new FootNode(category); + footnode.setAdjConstraint(true); + return footnode; + } + + | + + // TreeNode which has case feature marked (e.g. (S|nom ...)) + "(" category = Cat() ("{" feature=Feat() "}")? treelist=TreeList() ")" + { + TreeNode tree = new Tree(); + tree.setCategory(category); + tree.setChildren(treelist); + tree.setParentForTree(); + tree.setFeature(feature); + return tree; + } + + | + + // TreeNode with no case feature an no adjunction allowed (e.g. 
(^S DP...)) + "(" "^" category = Cat() treelist=TreeList() ")" + { + TreeNode tree = new Tree(); + tree.setCategory(category); + tree.setChildren(treelist); + tree.setParentForTree(); + tree.setAdjConstraint(true); + return tree; + } + + | + + // TerminalNode with case feature (e.g. N|nom:'house') + category = Cat() ("{" feature=Feat() "}")? ":" "'" (terminal = Terminal())? "'" + { + TerminalNode node = new TerminalNode(terminal, category); + node.setCategory(category); + node.setFeature(feature); + return node; + } + +} + + +String Terminal() : +{ + Token word; + String terminal=null; +} +{ + word = <WORD> (terminal=Terminal())? + { + if (terminal != null) return word.toString() + " " + terminal; + return word.toString(); + } +} + +List<TreeNode> TreeList() : +{ + List<TreeNode> treelist = null; + TreeNode tree; +} +{ + + tree = Tree() (treelist=TreeList())? + { + if (treelist == null) + { + treelist = new ArrayList<TreeNode>(); + } + + treelist.add(0,tree); + return treelist; + } +} + + +Category Cat() : +{ + Token cat; +} +{ + cat=<CATEGORY> + { + if (cat.toString().equals("DP")) return Category.DP; + if (cat.toString().equals("NP")) return Category.NP; + if (cat.toString().equals("N")) return Category.N; + if (cat.toString().equals("S")) return Category.S; + if (cat.toString().equals("V")) return Category.V; + if (cat.toString().equals("P")) return Category.P; + if (cat.toString().equals("VP")) return Category.VP; + if (cat.toString().equals("PP")) return Category.PP; + if (cat.toString().equals("DET")) return Category.DET; + if (cat.toString().equals("WH")) return Category.WH; + if (cat.toString().equals("ADV")) return Category.ADV; + if (cat.toString().equals("ADJ")) return Category.ADJ; + if (cat.toString().equals("ADJCOMP")) return Category.ADJCOMP; + if (cat.toString().equals("PART")) return Category.PART; + if (cat.toString().equals("PUNCT")) return Category.PUNCT; + if (cat.toString().equals("CC")) return Category.CC; + if (cat.toString().equals("EX")) 
return Category.EX; + if (cat.toString().equals("NUM")) return Category.NUM; + if (cat.toString().equals("C")) return Category.C; + if (cat.toString().equals("NEG")) return Category.NEG; + } + +} + +Feature Feat() : +{ + Token raw; +} +{ + + raw=<WORD> + { + + return Feature.construct(raw.toString()); + } +} + +TOKEN: {<WORD: (["a"-"z"]|["0"-"9"]|["?"]|["-"]|["_"]|["!"]|[","]|[";"]|["."]|[":"]|["/"])+>} + +TOKEN: {<CATEGORY: (["A"-"Z"])+>} + +SKIP : { " " | "\t" | "\n" | "\r" } + + + Property changes on: trunk/components-ext/src/main/javacc/LTAG_Parser.jj ___________________________________________________________________ Added: svn:executable + * Modified: trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex =================================================================== --- trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex 2012-05-08 11:53:28 UTC (rev 3697) +++ trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex 2012-05-09 15:28:25 UTC (rev 3698) @@ -90,7 +90,7 @@ the least || (DET DET:'the' DET:'least') || <y, l1, e, [ l1:[ | l2:[ y | ] THELEAST y l3:[|] ] ], [], [],[]> // NECESSARY "CHEAT" - highest || (NP ADJ:'highest' NP*) || <x, l1, e, [ l1:[ | maximum(x) ] ], [], [],[]> ;; <x, l1, e, [ l1:[ j | SLOT_high(x,j), maximum(j) ] ],[],[],[ SLOT_high/PROPERTY/height ]> + highest || (NP ADJ:'highest' NP*) || <x, l1, e, [ l1:[ j | SLOT_high(x,j), maximum(j) ] ],[],[],[ SLOT_high/PROPERTY/height ]> ;; <x, l1, e, [ l1:[ | maximum(x) ] ], [], [],[]> // COUNT more than || (DP DET:'more' DET:'than' NUM[num] NP[np]) || <y,l1,<<e,t>,t>,[ l1:[ y,c | count_greater(y,z) ] ],[(l2,y,np,<e,t>),(l3,z,num,e)],[l2=l1,l3=l1],[]> ;; <y,l1,<<e,t>,t>,[ l1:[ y | greater(y,z) ] ],[(l2,y,np,<e,t>),(l3,z,num,e)],[l2=l1,l3=l1],[]> @@ -125,6 +125,7 @@ what || (DP WH:'what') || <x, l1, <<e,t>,t>, [ l1:[ ?x | ] ], [], [], []> which || (DP WH:'which') || <x, l1, <<e,t>,t>, [ l1:[ ?x | ] ], [], [], []> how many || (DP WH:'how' 
ADJ:'many' NP[noun]) || <y, l1, <<e,t>,t>, [ l1:[ | l2:[ y | ] HOWMANY y l3:[|] ] ], [ (l4,y,noun,<e,t>) ], [ l4=l2 ],[]> + how many || (DP WH:'how' ADJ:'many' NP[noun]) || <y, l1, <<e,t>,t>, [ l1:[ ?y | ] ], [ (l4,y,noun,<e,t>) ], [ l4=l1 ],[]> who || (DP WH:'who') || <x, l1, <<e,t>,t>, [ l1:[ ?x | ] ], [], [], []> whom || (DP WH:'whom') || <x, l1, <<e,t>,t>, [ l1:[ ?x | ] ], [], [], []> when || (S WH:'when' S[s]) || <x, l1, t, [ l1:[ ?x | SLOT_p(y,x) ] ], [(l2,y,s,t)], [l2=l1], [ SLOT_p/PROPERTY/date ]> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-08 11:53:40
|
Revision: 3697 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3697&view=rev Author: lorenz_b Date: 2012-05-08 11:53:28 +0000 (Tue, 08 May 2012) Log Message: ----------- Added REGEX filter option. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java 2012-05-07 14:28:49 UTC (rev 3696) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java 2012-05-08 11:53:28 UTC (rev 3697) @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -28,6 +29,9 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.SimpleLayout; import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; @@ -42,9 +46,11 @@ import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.reasoning.SPARQLReasoner; +import 
org.semanticweb.owlapi.model.IRI; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.hp.hpl.jena.query.ParameterizedSparqlString; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; @@ -56,8 +62,14 @@ @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=DataPropertyEditor.class) private DatatypeProperty propertyToDescribe; + private static final ParameterizedSparqlString singleQueryTemplate = new ParameterizedSparqlString("SELECT ?type (COUNT(DISTINCT ?ind) AS ?cnt) WHERE {?ind <%s> ?o. ?ind a ?type.}"); + + private Map<Individual, SortedSet<Description>> individual2Types; + public DataPropertyDomainAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; + super.iterativeQueryTemplate = new ParameterizedSparqlString("SELECT DISTINCT ?ind ?type WHERE {?ind ?p ?o. ?ind a ?type.}"); + } public DatatypeProperty getPropertyToDescribe() { @@ -70,6 +82,7 @@ @Override public void start() { + iterativeQueryTemplate.setIri("p", propertyToDescribe.getName()); logger.info("Start learning..."); startTime = System.currentTimeMillis(); fetchedRows = 0; @@ -92,20 +105,49 @@ } } - //get subjects with types - Map<Individual, SortedSet<Description>> individual2Types = new HashMap<Individual, SortedSet<Description>>(); - boolean repeat = true; - int limit = 1000; - while(!terminationCriteriaSatisfied() && repeat){ - int ret = addIndividualsWithTypes(individual2Types, limit, fetchedRows); - currentlyBestAxioms = buildEvaluatedAxioms(individual2Types); - fetchedRows += 1000; - repeat = (ret == limit); - } + runIterativeQueryMode(); logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } + + private void runSingleQueryMode(){ + + } + + private void runIterativeQueryMode(){ + individual2Types = new HashMap<Individual, SortedSet<Description>>(); + while(!terminationCriteriaSatisfied() && !fullDataLoaded){ + ResultSet rs = fetchData(); + processData(rs); + buildEvaluatedAxioms(); + } + } + + private 
void processData(ResultSet rs){ + QuerySolution qs; + Individual ind; + Description type; + SortedSet<Description> types; + int cnt = 0; + while(rs.hasNext()){ + cnt++; + qs = rs.next(); + if(qs.get("type").isURIResource()){ + types = new TreeSet<Description>(); + ind = new Individual(qs.getResource("ind").getURI()); + type = new NamedClass(qs.getResource("type").getURI()); + types.add(type); + if(reasoner.isPrepared()){ + if(reasoner.getClassHierarchy().contains(type)){ + types.addAll(reasoner.getClassHierarchy().getSuperClasses(type)); + } + } + addToMap(individual2Types, ind, types); + } + } + lastRowCount = cnt; + } - private List<EvaluatedAxiom> buildEvaluatedAxioms(Map<Individual, SortedSet<Description>> individual2Types){ + private void buildEvaluatedAxioms(){ List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); Map<Description, Integer> result = new HashMap<Description, Integer>(); for(Entry<Individual, SortedSet<Description>> entry : individual2Types.entrySet()){ @@ -134,46 +176,14 @@ axioms.add(evalAxiom); } - return axioms; + currentlyBestAxioms = axioms; } - private int addIndividualsWithTypes(Map<Individual, SortedSet<Description>> ind2Types, int limit, int offset){ - String query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT DISTINCT ?ind ?type WHERE {?ind <%s> ?o. ?ind a ?type. ?type a owl:Class} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); + public static void main(String[] args) throws Exception{ + org.apache.log4j.Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout())); + org.apache.log4j.Logger.getRootLogger().setLevel(Level.INFO); + org.apache.log4j.Logger.getLogger(DataPropertyDomainAxiomLearner.class).setLevel(Level.INFO); -// String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. 
{SELECT ?ind {?ind <%s> ?o.} LIMIT %d OFFSET %d}}", propertyToDescribe.getName(), limit, offset); - - ResultSet rs = executeSelectQuery(query); - Individual ind; - Description newType; - QuerySolution qs; - SortedSet<Description> types; - int cnt = 0; - while(rs.hasNext()){ - cnt++; - qs = rs.next(); - if(qs.get("type").isURIResource()){ - ind = new Individual(qs.getResource("ind").getURI()); - newType = new NamedClass(qs.getResource("type").getURI()); - types = ind2Types.get(ind); - if(types == null){ - types = new TreeSet<Description>(); - ind2Types.put(ind, types); - } - types.add(newType); - Set<Description> superClasses; - if(reasoner.isPrepared()){ - if(reasoner.getClassHierarchy().contains(newType)){ - superClasses = reasoner.getClassHierarchy().getSuperClasses(newType); - types.addAll(superClasses); - } - - } - } - } - return cnt; - } - - public static void main(String[] args) throws Exception{ SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW()); SPARQLReasoner reasoner = new SPARQLReasoner(ks); @@ -181,9 +191,10 @@ DataPropertyDomainAxiomLearner l = new DataPropertyDomainAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); l.setReasoner(reasoner); - l.setPropertyToDescribe(new DatatypeProperty("http://dbpedia.org/ontology/birthDate")); + l.setPropertyToDescribe(new DatatypeProperty("http://dbpedia.org/ontology/AutomobileEngine/height")); l.setMaxExecutionTimeInSeconds(10); - l.setReturnOnlyNewAxioms(true); + l.addFilterNamespace("http://dbpedia.org/ontology/"); +// l.setReturnOnlyNewAxioms(true); l.init(); l.start(); System.out.println(l.getCurrentlyBestEvaluatedAxioms(5)); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2012-05-07 14:28:49 UTC 
(rev 3696) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2012-05-08 11:53:28 UTC (rev 3697) @@ -255,8 +255,8 @@ public static void main(String[] args) throws Exception{ DisjointDataPropertyAxiomLearner l = new DisjointDataPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); - l.setPropertyToDescribe(new DatatypeProperty("http://dbpedia.org/ontology/position")); - l.setMaxExecutionTimeInSeconds(20); + l.setPropertyToDescribe(new DatatypeProperty("http://dbpedia.org/ontology/accessDate")); + l.setMaxExecutionTimeInSeconds(10); l.init(); l.getReasoner().precomputeDataPropertyPopularity(); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2012-05-07 14:28:49 UTC (rev 3696) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2012-05-08 11:53:28 UTC (rev 3697) @@ -19,7 +19,6 @@ package org.dllearner.algorithms.properties; -import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -29,11 +28,15 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.SimpleLayout; import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ObjectPropertyEditor; +import org.dllearner.core.owl.DatatypePropertyDomainAxiom; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Individual; import org.dllearner.core.owl.NamedClass; @@ -41,11 +44,13 @@ import 
org.dllearner.core.owl.ObjectPropertyDomainAxiom; import org.dllearner.core.owl.Thing; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.reasoning.SPARQLReasoner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.hp.hpl.jena.query.ParameterizedSparqlString; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; @@ -54,11 +59,15 @@ private static final Logger logger = LoggerFactory.getLogger(ObjectPropertyDomainAxiomLearner.class); + private Map<Individual, SortedSet<Description>> individual2Types; + + @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; public ObjectPropertyDomainAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; + super.iterativeQueryTemplate = new ParameterizedSparqlString("SELECT DISTINCT ?ind ?type WHERE {?ind ?p ?o. ?ind a ?type.}"); } public ObjectProperty getPropertyToDescribe() { @@ -71,12 +80,13 @@ @Override public void start() { + iterativeQueryTemplate.setIri("p", propertyToDescribe.getName()); logger.info("Start learning..."); startTime = System.currentTimeMillis(); fetchedRows = 0; currentlyBestAxioms = new ArrayList<EvaluatedAxiom>(); - if(reasoner.isPrepared()){ + if(returnOnlyNewAxioms){ //get existing domains Description existingDomain = reasoner.getDomain(propertyToDescribe); if(existingDomain != null){ @@ -93,20 +103,49 @@ } } - //get subjects with types - Map<Individual, SortedSet<Description>> individual2Types = new HashMap<Individual, SortedSet<Description>>(); - boolean repeat = true; - int limit = 1000; - while(!terminationCriteriaSatisfied() && repeat){ - int ret = addIndividualsWithTypes(individual2Types, limit, fetchedRows); - currentlyBestAxioms = buildEvaluatedAxioms(individual2Types); - fetchedRows += 1000; - repeat = (ret == limit); - } + runIterativeQueryMode(); 
logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } + + private void runSingleQueryMode(){ + + } + + private void runIterativeQueryMode(){ + individual2Types = new HashMap<Individual, SortedSet<Description>>(); + while(!terminationCriteriaSatisfied() && !fullDataLoaded){ + ResultSet rs = fetchData(); + processData(rs); + buildEvaluatedAxioms(); + } + } + + private void processData(ResultSet rs){ + QuerySolution qs; + Individual ind; + Description type; + SortedSet<Description> types; + int cnt = 0; + while(rs.hasNext()){ + cnt++; + qs = rs.next(); + if(qs.get("type").isURIResource()){ + types = new TreeSet<Description>(); + ind = new Individual(qs.getResource("ind").getURI()); + type = new NamedClass(qs.getResource("type").getURI()); + types.add(type); + if(reasoner.isPrepared()){ + if(reasoner.getClassHierarchy().contains(type)){ + types.addAll(reasoner.getClassHierarchy().getSuperClasses(type)); + } + } + addToMap(individual2Types, ind, types); + } + } + lastRowCount = cnt; + } - private List<EvaluatedAxiom> buildEvaluatedAxioms(Map<Individual, SortedSet<Description>> individual2Types){ + private void buildEvaluatedAxioms(){ List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); Map<Description, Integer> result = new HashMap<Description, Integer>(); for(Entry<Individual, SortedSet<Description>> entry : individual2Types.entrySet()){ @@ -135,57 +174,25 @@ axioms.add(evalAxiom); } - return axioms; + currentlyBestAxioms = axioms; } - private int addIndividualsWithTypes(Map<Individual, SortedSet<Description>> ind2Types, int limit, int offset){ - String query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT DISTINCT ?ind ?type WHERE {?ind <%s> ?o. ?ind a ?type. 
?type a owl:Class} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); + public static void main(String[] args) throws Exception{ + org.apache.log4j.Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout())); + org.apache.log4j.Logger.getRootLogger().setLevel(Level.INFO); + org.apache.log4j.Logger.getLogger(DataPropertyDomainAxiomLearner.class).setLevel(Level.INFO); -// String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. {SELECT ?ind {?ind <%s> ?o.} LIMIT %d OFFSET %d}}", propertyToDescribe.getName(), limit, offset); + SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW()); - ResultSet rs = executeSelectQuery(query); - Individual ind; - Description newType; - QuerySolution qs; - SortedSet<Description> types; - int cnt = 0; - while(rs.hasNext()){ - cnt++; - qs = rs.next(); - if(qs.get("type").isURIResource()){ - ind = new Individual(qs.getResource("ind").getURI()); - newType = new NamedClass(qs.getResource("type").getURI()); - types = ind2Types.get(ind); - if(types == null){ - types = new TreeSet<Description>(); - ind2Types.put(ind, types); - } - types.add(newType); - Set<Description> superClasses; - if(reasoner.isPrepared()){ - if(reasoner.getClassHierarchy().contains(newType)){ - superClasses = reasoner.getClassHierarchy().getSuperClasses(newType); - types.addAll(superClasses); - } - - } - } - - } - return cnt; - } - - public static void main(String[] args) throws Exception{ - SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia()); - SPARQLReasoner reasoner = new SPARQLReasoner(ks); reasoner.prepareSubsumptionHierarchy(); ObjectPropertyDomainAxiomLearner l = new ObjectPropertyDomainAxiomLearner(ks); l.setReasoner(reasoner); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/Automobile/fuelCapacity")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/currency")); l.setMaxExecutionTimeInSeconds(10); + 
l.addFilterNamespace("http://dbpedia.org/ontology/"); // l.setReturnOnlyNewAxioms(true); l.init(); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java 2012-05-07 14:28:49 UTC (rev 3696) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java 2012-05-08 11:53:28 UTC (rev 3697) @@ -24,7 +24,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -52,6 +51,7 @@ public class ObjectPropertyRangeAxiomLearner extends AbstractAxiomLearningAlgorithm { private static final Logger logger = LoggerFactory.getLogger(ObjectPropertyRangeAxiomLearner.class); + private Map<Individual, SortedSet<Description>> individual2Types; @ConfigOption(name="propertyToDescribe", description="", propertyEditorClass=ObjectPropertyEditor.class) private ObjectProperty propertyToDescribe; @@ -70,12 +70,13 @@ @Override public void start() { + iterativeQueryTemplate.setIri("p", propertyToDescribe.getName()); logger.info("Start learning..."); startTime = System.currentTimeMillis(); fetchedRows = 0; currentlyBestAxioms = new ArrayList<EvaluatedAxiom>(); - if(reasoner.isPrepared()){ + if(returnOnlyNewAxioms){ //get existing ranges Description existingRange = reasoner.getRange(propertyToDescribe); if(existingRange != null){ @@ -92,20 +93,49 @@ } } - //get objects with types - Map<Individual, SortedSet<Description>> individual2Types = new HashMap<Individual, SortedSet<Description>>(); - boolean repeat = true; - int limit = 1000; - while(!terminationCriteriaSatisfied() && repeat){ - int ret = addIndividualsWithTypes(individual2Types, limit, fetchedRows); - currentlyBestAxioms = 
buildEvaluatedAxioms(individual2Types); - fetchedRows += 1000; - repeat = (ret == limit); - } + runIterativeQueryMode(); logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } - private List<EvaluatedAxiom> buildEvaluatedAxioms(Map<Individual, SortedSet<Description>> individual2Types){ + private void runSingleQueryMode(){ + + } + + private void runIterativeQueryMode(){ + individual2Types = new HashMap<Individual, SortedSet<Description>>(); + while(!terminationCriteriaSatisfied() && !fullDataLoaded){ + ResultSet rs = fetchData(); + processData(rs); + buildEvaluatedAxioms(); + } + } + + private void processData(ResultSet rs){ + QuerySolution qs; + Individual ind; + Description type; + SortedSet<Description> types; + int cnt = 0; + while(rs.hasNext()){ + cnt++; + qs = rs.next(); + if(qs.get("type").isURIResource()){ + types = new TreeSet<Description>(); + ind = new Individual(qs.getResource("ind").getURI()); + type = new NamedClass(qs.getResource("type").getURI()); + types.add(type); + if(reasoner.isPrepared()){ + if(reasoner.getClassHierarchy().contains(type)){ + types.addAll(reasoner.getClassHierarchy().getSuperClasses(type)); + } + } + addToMap(individual2Types, ind, types); + } + } + lastRowCount = cnt; + } + + private void buildEvaluatedAxioms(){ List<EvaluatedAxiom> axioms = new ArrayList<EvaluatedAxiom>(); Map<Description, Integer> result = new HashMap<Description, Integer>(); for(Entry<Individual, SortedSet<Description>> entry : individual2Types.entrySet()){ @@ -134,52 +164,18 @@ axioms.add(evalAxiom); } - return axioms; + currentlyBestAxioms = axioms; } - private int addIndividualsWithTypes(Map<Individual, SortedSet<Description>> ind2Types, int limit, int offset){ - String query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT DISTINCT ?ind ?type WHERE {?s <%s> ?ind. ?ind a ?type. 
?type a owl:Class} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); - -// String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. {SELECT ?ind {?ind <%s> ?o.} LIMIT %d OFFSET %d}}", propertyToDescribe.getName(), limit, offset); - - ResultSet rs = executeSelectQuery(query); - Individual ind; - Description newType; - QuerySolution qs; - SortedSet<Description> types; - int cnt = 0; - while(rs.hasNext()){ - cnt++; - qs = rs.next(); - ind = new Individual(qs.getResource("ind").getURI()); - newType = new NamedClass(qs.getResource("type").getURI()); - types = ind2Types.get(ind); - if(types == null){ - types = new TreeSet<Description>(); - ind2Types.put(ind, types); - } - types.add(newType); - Set<Description> superClasses; - if(reasoner.isPrepared()){ - if(reasoner.getClassHierarchy().contains(newType)){ - superClasses = reasoner.getClassHierarchy().getSuperClasses(newType); - types.addAll(superClasses); - } - - } - } - return cnt; - } - public static void main(String[] args) throws Exception{ - SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW()); + SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia()); SPARQLReasoner reasoner = new SPARQLReasoner(ks); reasoner.prepareSubsumptionHierarchy(); ObjectPropertyRangeAxiomLearner l = new ObjectPropertyRangeAxiomLearner(ks); l.setReasoner(reasoner); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/routeTypeAbbreviation")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/ideology")); l.setMaxExecutionTimeInSeconds(10); // l.setReturnOnlyNewAxioms(true); l.init(); Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-05-07 14:28:49 UTC (rev 3696) +++ 
trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-05-08 11:53:28 UTC (rev 3697) @@ -21,6 +21,7 @@ import java.net.SocketTimeoutException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -49,7 +50,10 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import com.hp.hpl.jena.graph.Node; import com.hp.hpl.jena.ontology.OntClass; +import com.hp.hpl.jena.query.ParameterizedSparqlString; +import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.ResultSet; @@ -57,7 +61,15 @@ import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; +import com.hp.hpl.jena.sparql.expr.E_Equals; +import com.hp.hpl.jena.sparql.expr.E_Regex; +import com.hp.hpl.jena.sparql.expr.E_Str; +import com.hp.hpl.jena.sparql.expr.ExprVar; +import com.hp.hpl.jena.sparql.expr.NodeValue; import com.hp.hpl.jena.sparql.resultset.ResultSetMem; +import com.hp.hpl.jena.sparql.syntax.ElementFilter; +import com.hp.hpl.jena.sparql.syntax.ElementGroup; +import com.hp.hpl.jena.sparql.util.NodeFactory; import com.hp.hpl.jena.util.iterator.Filter; import com.hp.hpl.jena.vocabulary.OWL2; import com.hp.hpl.jena.vocabulary.RDF; @@ -93,6 +105,17 @@ protected boolean forceSPARQL_1_0_Mode = false; + protected int chunkCount = 0; + protected int chunkSize = 1000; + protected int offset = 0; + protected int lastRowCount = 0; + + protected boolean fullDataLoaded = false; + + private List<String> filterNamespaces = new ArrayList<String>(); + + protected ParameterizedSparqlString iterativeQueryTemplate; + public AbstractAxiomLearningAlgorithm() { existingAxioms = new TreeSet<Axiom>(new AxiomComparator()); } @@ -259,7 +282,7 @@ } } - 
protected ResultSet executeSelectQuery(String query) { + protected ResultSet executeSelectQuery(String query) {System.out.println(query); logger.debug("Sending query\n{} ...", query); if(ks.isRemote()){ SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); @@ -274,7 +297,12 @@ return rs; } catch (QueryExceptionHTTP e) { if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout"); + if(timeout){ + logger.warn("Got timeout"); + } else { + logger.trace("Got local timeout"); + } + } else { logger.error("Exception executing query", e); } @@ -381,6 +409,87 @@ return 2 * precision * recall / (precision + recall); } + protected ResultSet fetchData(){ + setChunkConditions(); + if(!fullDataLoaded){ + Query query = buildQuery(); + offset += chunkSize; + ResultSet rs = executeSelectQuery(query.toString()); + chunkCount++; + return rs; + } + return new ResultSetMem(); + } + + private void setChunkConditions() { + // adapt chunk size if needed + if (chunkCount == 1 && lastRowCount < chunkSize) { + logger.info("Adapting chunk size from " + chunkSize + " to " + lastRowCount); + chunkSize = lastRowCount; + offset = lastRowCount; + } + + // check if full data was loaded + if(chunkCount != 0){ + fullDataLoaded = (lastRowCount == 0) || (lastRowCount < chunkSize); + if (fullDataLoaded) { + logger.info("Loaded whole data. 
Early termination."); + } + } + } + + private Query buildQuery(){ + Query query = iterativeQueryTemplate.asQuery(); + for(String ns : filterNamespaces){ + ((ElementGroup)query.getQueryPattern()).addElementFilter( + new ElementFilter( + new E_Regex( + new E_Str(new ExprVar(Node.createVariable("type"))), + ns, ""))); + } + query.setLimit(chunkSize); + query.setOffset(offset); + return query; + } + + public void addFilterNamespace(String namespace){ + filterNamespaces.add(namespace); + } + + protected <K,T extends Set<V>, V> void addToMap(Map<K, T> map, K key, V value ){ + T values = map.get(key); + if(values == null){ + try { + values = (T) values.getClass().newInstance(); + } catch (InstantiationException e) { + e.printStackTrace(); + } catch (IllegalAccessException e) { + e.printStackTrace(); + } + values.add(value); + } + values.add(value); + } + + protected <K,T extends Set<V>, V> void addToMap(Map<K, T> map, K key, Collection<V> newValues ){ + T values = map.get(key); + if(values == null){ + try { + values = (T) newValues.getClass().newInstance(); + } catch (InstantiationException e) { + e.printStackTrace(); + } catch (IllegalAccessException e) { + e.printStackTrace(); + } + map.put(key, values); + } + values.addAll(newValues); + } + + private void adaptChunkCount(){ + + } + class OWLFilter extends Filter<OntClass>{ @Override Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-07 14:28:49 UTC (rev 3696) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-08 11:53:28 UTC (rev 3697) @@ -219,6 +219,8 @@ private OWLReasoner reasoner; private OWLDataFactory factory = new OWLDataFactoryImpl(); + private static final String NAMESPACE = "http://dbpedia.org/ontology"; + private SPARQLReasoner sparqlReasoner; public 
EnrichmentEvaluation(SparqlEndpoint endpoint) { @@ -370,6 +372,7 @@ AxiomLearningAlgorithm learner = algorithmClass.getConstructor( SparqlEndpointKS.class).newInstance(ks); ((AbstractAxiomLearningAlgorithm)learner).setReasoner(sparqlReasoner); + ((AbstractAxiomLearningAlgorithm)learner).addFilterNamespace(NAMESPACE); ConfigHelper.configure(learner, "propertyToDescribe", property.toString()); ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds); @@ -459,6 +462,7 @@ AxiomLearningAlgorithm learner = algorithmClass.getConstructor( SparqlEndpointKS.class).newInstance(ks); ((AbstractAxiomLearningAlgorithm)learner).setReasoner(sparqlReasoner); + ((AbstractAxiomLearningAlgorithm)learner).addFilterNamespace(NAMESPACE); ConfigHelper.configure(learner, "propertyToDescribe", property.toString()); ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2012-05-07 14:29:00
|
Revision: 3696 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3696&view=rev Author: jenslehmann Date: 2012-05-07 14:28:49 +0000 (Mon, 07 May 2012) Log Message: ----------- allow list of components to be set externally; marked some methods as deprecated Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java Modified: trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-05-07 14:12:56 UTC (rev 3695) +++ trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-05-07 14:28:49 UTC (rev 3696) @@ -117,6 +117,15 @@ } /** + * Explicitly sets the list of components to use. This will (re-)initialise the + * component manager the next time the singleton instance is retrieved. + */ + public static void setComponentClassNames(List<String> componentClassNames) { + AnnComponentManager.componentClassNames = componentClassNames; + cm = null; + } + + /** * Gets the singleton instance of <code>ComponentManager</code>. * @return The singleton <code>ComponentManager</code> instance. */ @@ -175,13 +184,10 @@ */ public BidiMap<Class<? extends Component>, String> getComponentsNamedShort() { return componentNamesShort; - } - - public boolean isCompatible() { - return false; } // gets all components which this component can be plugged into + @Deprecated public Collection<Class<? extends Component>> getPluggableComponents(Class<? extends Component> component) { Collection<Class<? extends Component>> pluggableComponents = new LinkedList<Class<? extends Component>>(); for(Class<? 
extends Component> comp : components) { @@ -193,7 +199,8 @@ } // should return true if there exists a constructor in "compound" which can take - // "component" as argument (in any argument positions) + // "component" as argument (in any argument positions) + @Deprecated public boolean isPluggable(Class<? extends Component> compound, Class<? extends Component> argument) { try { Constructor<?>[] constructors = compound.getDeclaredConstructors(); @@ -211,6 +218,7 @@ return false; } + @Deprecated public boolean isCompatible(Class<? extends Component> compound, Class<? extends Component>... arguments) { if(areValidComponentConstructorArguments(arguments)) { throw new Error("Please order arguments by their class names."); @@ -218,6 +226,7 @@ return hasMatchingConstructor(compound, arguments); } + @Deprecated private boolean hasMatchingConstructor(Class<? extends Component> compound, Class<? extends Component>... arguments) { try { Constructor<?>[] constructors = compound.getDeclaredConstructors(); @@ -251,6 +260,7 @@ * @param arguments Argument classes. * @return True of the order of arguments is correct and false otherwise. */ + @Deprecated public boolean areValidComponentConstructorArguments(Class<? extends Component>... arguments) { for(int i=0; i<arguments.length; i++) { if(arguments[i].getName().compareTo(arguments[i+1].getName())<0) { @@ -260,8 +270,13 @@ return true; } - // method lists all core interfaces implemented by a component (directly or indirectly) - // TODO: incomplete + /** + * Convenience method to retrieve core types of a component. The main use case for this + * is for automatic documentation generation. + * + * @param component A component. + * @return The list of core interfaces the component implemnets. + */ public static List<Class<? extends Component>> getCoreComponentTypes(Class<? extends Component> component) { List<Class<? extends Component>> types = new LinkedList<Class<? 
extends Component>>(); if(KnowledgeSource.class.isAssignableFrom(component)) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <sha...@us...> - 2012-05-07 14:13:07
|
Revision: 3695 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3695&view=rev Author: shadowtm Date: 2012-05-07 14:12:56 +0000 (Mon, 07 May 2012) Log Message: ----------- Cleaned up some of the reasoning code in the OWLAPIReasoner so that it depends on one type of KnowledgeSource (OWLOntologyKnowledgeSource) rather than the generic KnowledgeSource. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/JenaToOwlapiConverter.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java 2012-05-07 11:56:11 UTC (rev 3694) +++ trunk/components-core/src/main/java/org/dllearner/kb/OWLOntologyKnowledgeSource.java 2012-05-07 14:12:56 UTC (rev 3695) @@ -1,5 +1,6 @@ package org.dllearner.kb; +import org.dllearner.core.KnowledgeSource; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyManager; @@ -11,7 +12,7 @@ * * This interface represents objects which can return an OWLOntology representation of itself. */ -public interface OWLOntologyKnowledgeSource { +public interface OWLOntologyKnowledgeSource extends KnowledgeSource{ /** * Create an OWL Ontology associated with the specified manager. 
Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/JenaToOwlapiConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/JenaToOwlapiConverter.java 2012-05-07 11:56:11 UTC (rev 3694) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/JenaToOwlapiConverter.java 2012-05-07 14:12:56 UTC (rev 3695) @@ -23,12 +23,14 @@ private static Logger log = LoggerFactory.getLogger(JenaToOwlapiConverter.class); /** - * - * @param model - * @return + * Convert a Jena Model to an OWL Ontology. + * + * @param model The model to convert + * @param manager The OWL Ontology Manager - this has to be passed in rather than using the default one in order to be thread safe. + * @return The converted OWL Ontology linked to the specified OWL Ontology Manager. */ - public OWLOntology convert(Model model) { - OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + public OWLOntology convert(Model model, OWLOntologyManager manager) { + OWLOntology ontology=null; try { StringWriter writer = new StringWriter(); Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-05-07 11:56:11 UTC (rev 3694) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-05-07 14:12:56 UTC (rev 3695) @@ -11,9 +11,11 @@ import org.dllearner.core.ComponentInitException; import org.dllearner.core.KnowledgeSource; import org.dllearner.core.config.ConfigOption; +import org.dllearner.kb.OWLOntologyKnowledgeSource; import org.dllearner.utilities.JamonMonitorLogger; import org.dllearner.utilities.analyse.TypeOntology; import org.semanticweb.owlapi.model.OWLOntology; +import 
org.semanticweb.owlapi.model.OWLOntologyManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,7 +32,7 @@ import com.jamonapi.MonitorFactory; @ComponentAnn(name = "efficient SPARQL fragment extractor", shortName = "sparqls", version = 0.1) -public class SparqlSimpleExtractor implements KnowledgeSource { +public class SparqlSimpleExtractor implements KnowledgeSource, OWLOntologyKnowledgeSource { @ConfigOption(name = "endpointURL", description = "URL of the SPARQL endpoint", required = true) private String endpointURL = null; @@ -52,7 +54,6 @@ @ConfigOption(name = "ontologySchemaUrls", description = "List of Ontology Schema URLs", required = true) private List<String> ontologySchemaUrls = null; - private OWLOntology owlOntology; private SchemaIndexer indexer; private static Logger log = LoggerFactory.getLogger(SparqlSimpleExtractor.class); @@ -214,8 +215,7 @@ log.debug("{}", model); } } - JenaToOwlapiConverter converter = new JenaToOwlapiConverter(); - owlOntology = converter.convert(this.model); + monIndexing.stop(); monComp.stop(); log.info("*******Simple SPARQL Extractor********"); @@ -286,13 +286,6 @@ this.recursionDepth = recursionDepth; } - /** - * @return - */ - public OWLOntology getOWLOntology() { - return owlOntology; - } - public List<String> getOntologySchemaUrls() { return ontologySchemaUrls; } @@ -308,4 +301,10 @@ public void setTboxfilter(String tboxfilter) { this.tboxfilter = tboxfilter; } + + @Override + public OWLOntology createOWLOntology(OWLOntologyManager manager) { + JenaToOwlapiConverter converter = new JenaToOwlapiConverter(); + return converter.convert(this.model,manager); + } } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-05-07 11:56:11 UTC (rev 3694) +++ 
trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-05-07 14:12:56 UTC (rev 3695) @@ -227,71 +227,45 @@ if (source instanceof OWLOntologyKnowledgeSource) { ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); owlAPIOntologies.add(ontology); - } else if(source instanceof SparqlSimpleExtractor) { - ontology=((SparqlSimpleExtractor) source).getOWLOntology(); - manager=ontology.getOWLOntologyManager(); - owlAPIOntologies.add(ontology); + }else{ + //This reasoner requires an ontology to process + throw new ComponentInitException("OWL API Reasoner Requires an OWLKnowledgeSource. Received a KS of type: " + source.getClass().getName()); } - if (source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof SparqlSimpleExtractor || source instanceof OWLAPIOntology) { + directImports.addAll(ontology.getImportsDeclarations()); - directImports.addAll(ontology.getImportsDeclarations()); - try { - // imports includes the ontology itself - //FIXME this line throws the strange error - Set<OWLOntology> imports = manager.getImportsClosure(ontology); - allImports.addAll(imports); + try { + // imports includes the ontology itself + //FIXME this line throws the strange error + Set<OWLOntology> imports = manager.getImportsClosure(ontology); + allImports.addAll(imports); // System.out.println(imports); - for (OWLOntology ont : imports) { - classes.addAll(ont.getClassesInSignature()); - owlObjectProperties.addAll(ont.getObjectPropertiesInSignature()); - owlDatatypeProperties.addAll(ont.getDataPropertiesInSignature()); - owlIndividuals.addAll(ont.getIndividualsInSignature()); - } - - } catch (UnknownOWLOntologyException uooe) { - logger.error("UnknownOWLOntologyException occured, imports were not loaded! 
This is a bug, which has not been fixed yet."); + for (OWLOntology ont : imports) { + classes.addAll(ont.getClassesInSignature()); + owlObjectProperties.addAll(ont.getObjectPropertiesInSignature()); + owlDatatypeProperties.addAll(ont.getDataPropertiesInSignature()); + owlIndividuals.addAll(ont.getIndividualsInSignature()); } - // if several knowledge sources are included, then we can only - // guarantee that the base URI is from one of those sources (there - // can't be more than one); but we will take care that all prefixes are - // correctly imported - OWLOntologyFormat format = manager.getOntologyFormat(ontology); - if (format instanceof PrefixOWLOntologyFormat) { - prefixes.putAll(((PrefixOWLOntologyFormat) format).getPrefixName2PrefixMap()); - baseURI = ((PrefixOWLOntologyFormat) format).getDefaultPrefix(); - prefixes.remove(""); - } + } catch (UnknownOWLOntologyException uooe) { + logger.error("UnknownOWLOntologyException occured, imports were not loaded! This is a bug, which has not been fixed yet."); + } - // all other sources are converted to KB and then to an - // OWL API ontology - } else { - - //KB Files - KB kb = ((AbstractKnowledgeSource)source).toKB(); - - if (!(source instanceof OWLOntologyKnowledgeSource)) { - //Not sure if this will ever get hit, but leaving in for backward compatibility. 
- IRI ontologyURI = IRI.create("http://example.com"); - ontology = null; - try { - ontology = manager.createOntology(ontologyURI); - } catch (OWLOntologyCreationException e) { - throw new RuntimeException(e); - } - OWLAPIAxiomConvertVisitor.fillOWLOntology(manager, ontology, kb); - owlAPIOntologies.add(ontology); - } - - allImports.add(ontology); - atomicConcepts.addAll(kb.findAllAtomicConcepts()); - atomicRoles.addAll(kb.findAllAtomicRoles()); - individuals.addAll(kb.findAllIndividuals()); - // TODO: add method to find datatypes + // if several knowledge sources are included, then we can only + // guarantee that the base URI is from one of those sources (there + // can't be more than one); but we will take care that all prefixes are + // correctly imported + OWLOntologyFormat format = manager.getOntologyFormat(ontology); + if (format instanceof PrefixOWLOntologyFormat) { + prefixes.putAll(((PrefixOWLOntologyFormat) format).getPrefixName2PrefixMap()); + baseURI = ((PrefixOWLOntologyFormat) format).getDefaultPrefix(); + prefixes.remove(""); } + } + + //Now merge all of the knowledge sources into one ontology instance. try { //The following line illustrates a problem with using different OWLOntologyManagers. This can manifest itself if we have multiple sources who were created with different manager instances. //ontology = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://dl-learner/all"), new HashSet<OWLOntology>(owlAPIOntologies)); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <chr...@us...> - 2012-05-07 13:15:30
|
Revision: 3694 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3694&view=rev Author: christinaunger Date: 2012-05-07 11:56:11 +0000 (Mon, 07 May 2012) Log Message: ----------- [tbsl] repaired some stuff causing parse failures Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/converter/DRS2BasicSPARQL_Converter.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/GrammarFilter.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/GoldTagger.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/converter/DRS2BasicSPARQL_Converter.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/converter/DRS2BasicSPARQL_Converter.java 2012-05-06 21:35:52 UTC (rev 3693) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/converter/DRS2BasicSPARQL_Converter.java 2012-05-07 11:56:11 UTC (rev 3694) @@ -1,9 +1,6 @@ package org.dllearner.algorithm.tbsl.converter; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; +import java.util.*; import org.dllearner.algorithm.tbsl.sem.drs.Complex_DRS_Condition; import org.dllearner.algorithm.tbsl.sem.drs.DRS; @@ -241,6 +238,35 @@ p.setTarget(simple.getArguments().get(1).getValue()); temp.addConditions(p); } + else if (simple.getArguments().size() == 3) { + Path p = new Path(); + p.setStart(simple.getArguments().get(0).getValue()); + 
p.setVia(predicate); + String newword = null; + Slot del = null; + for (Slot s : slots) { + if (s.getAnchor().equals(simple.getArguments().get(1).getValue())) { + newword = s.getWords().get(0); + del = s; + break; + } + } + if (newword != null) { + for (Slot s : slots) { + if (s.getAnchor().equals(predicate)) { + boolean date = false; + if (s.getWords().get(0).endsWith(" date")) date = true; + newword = s.getWords().get(0).replace(" date","") + " " + newword; + if (date) newword += " date"; + s.setWords(Arrays.asList(newword)); + break; + } + } + if (del != null) slots.remove(del); + } + p.setTarget(simple.getArguments().get(2).getValue()); + temp.addConditions(p); + } } else if (predicate.equals("count")) { if (simple.getArguments().size() == 1) { Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/GrammarFilter.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/GrammarFilter.java 2012-05-06 21:35:52 UTC (rev 3693) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/GrammarFilter.java 2012-05-07 11:56:11 UTC (rev 3694) @@ -151,11 +151,11 @@ */ String[] tokenParts = token.split(" "); if (tokenParts.length > 2) { - + for (String anchor : grammar.getWildCardAnchors()) { if (token.matches(anchor)) { - + foundCandidates = true; coveredTokens.add(token); Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java 2012-05-06 21:35:52 UTC (rev 3693) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/ltag/parser/Preprocessor.java 2012-05-07 11:56:11 UTC (rev 3694) @@ -195,8 +195,13 @@ } m = whenPattern.matcher(condensedstring); while (m.find()) { + if (m.group(5).equals("VPREP")) { + if 
(VERBOSE) logger.trace("Replacing " + m.group(1) + " by " + m.group(2)+m.group(3)+"/WHENPREP"); + condensedstring = condensedstring.replaceFirst(m.group(1),m.group(2) + m.group(3)+"/WHENPREP"); + } else { if (VERBOSE) logger.trace("Replacing " + m.group(1) + " by " + m.group(2)+m.group(3)+"/WHEN"); condensedstring = condensedstring.replaceFirst(m.group(1),m.group(2) + m.group(3)+"/WHEN"); + } } m = wherePattern.matcher(condensedstring); while (m.find()) { @@ -210,13 +215,13 @@ } m = adjnounPattern.matcher(condensedstring); while (m.find()) { - if (VERBOSE) logger.trace("Replacing " + m.group(1) + " by " + m.group(2)+"_"+m.group(3)+"/JJNN"); - condensedstring = condensedstring.replaceFirst(m.group(1),m.group(2)+"_"+m.group(3)+"/JJNN"); + if (VERBOSE) logger.trace("Replacing " + m.group(1) + " by " + m.group(2)+"_"+m.group(3)+"/NN"); + condensedstring = condensedstring.replaceFirst(m.group(1),m.group(2)+"_"+m.group(3)+"/NN"); } m = adjnprepPattern.matcher(condensedstring); while (m.find()) { - if (VERBOSE) logger.trace("Replacing " + m.group(1) + " by " + m.group(2)+"_"+m.group(3)+"/JJNPREP"); - condensedstring = condensedstring.replaceFirst(m.group(1),m.group(2)+"_"+m.group(3)+"/JJNPREP"); + if (VERBOSE) logger.trace("Replacing " + m.group(1) + " by " + m.group(2)+"_"+m.group(3)+"/NPREP"); + condensedstring = condensedstring.replaceFirst(m.group(1),m.group(2)+"_"+m.group(3)+"/NPREP"); } return condensedstring; Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj 2012-05-06 21:35:52 UTC (rev 3693) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/sem/dudes/reader/DUDE_Parser.jj 2012-05-07 11:56:11 UTC (rev 3694) @@ -52,44 +52,44 @@ { DUDE() <EOF> } - -/** DUDE */ -Dude DUDE() : -{ - Token referent; - Token label; - Type type; - 
List<DRS> drs_list = null; - List<DominanceConstraint> constraints = null; + +/** DUDE */ +Dude DUDE() : +{ + Token referent; + Token label; + Type type; + List<DRS> drs_list = null; + List<DominanceConstraint> constraints = null; List<Argument> arg_list = null; - List<Slot> slots = null; -} -{ + List<Slot> slots = null; +} +{ "<" referent = dr() "," label=<LABEL> "," type=Type() "," "[" (drs_list=DRS_List())? "]" "," "[" (arg_list = Arg_List())? "]" - "," "[" (constraints = DC_List())? "]" "," "[" (slots = Slot_List())? "]" ">" - { - Dude dude = new Dude(); - dude.setReferent(referent.toString()); - dude.setType(type); - if (drs_list != null) dude.setComponents(drs_list); - dude.setLabel(new Label(label.toString())); - if (arg_list != null) dude.setArguments(arg_list); + "," "[" (constraints = DC_List())? "]" "," "[" (slots = Slot_List())? "]" ">" + { + Dude dude = new Dude(); + dude.setReferent(referent.toString()); + dude.setType(type); + if (drs_list != null) dude.setComponents(drs_list); + dude.setLabel(new Label(label.toString())); + if (arg_list != null) dude.setArguments(arg_list); if (constraints != null) dude.setDominanceConstraints(constraints); - if (slots != null) dude.setSlots(slots); - return dude; - } -} - -DominanceConstraint DominanceConstraint() : -{ - Label label1; + if (slots != null) dude.setSlots(slots); + return dude; + } +} + +DominanceConstraint DominanceConstraint() : +{ + Label label1; Label label2; Token domType; - DominanceConstraint dc = null; -} + DominanceConstraint dc = null; +} { label1 = Label() "<" label2 = Label() - { + { dc = new DominanceConstraint(label1,label2); return dc; } @@ -105,21 +105,21 @@ | label1 = Label() "=" label2 = Label() - { + { dc = new DominanceConstraint(label1,label2); dc.setType(DomType.equal); return dc; - } -} - - -Label Label() : -{ - Token label; -} + } +} + + +Label Label() : { - - label = <LABEL> + Token label; +} +{ + + label = <LABEL> { return new Label(label.toString()); } @@ -127,219 +127,219 
@@ | "res(" label = <LABEL> ")" - { + { return new Label(label.toString(),Position.res); } | "scope(" label = <LABEL> ")" - { + { return new Label(label.toString(),Position.scope); } -} - - -List<DominanceConstraint> DC_List() : -{ -List<DominanceConstraint> dc_list = null; -DominanceConstraint dc = null; -} -{ - dc = DominanceConstraint() ("," dc_list = DC_List())? - { - if (dc_list == null) - { - dc_list = new ArrayList<DominanceConstraint>(); - } - - dc_list.add(0,dc); - return dc_list; - } - - } - -List<DRS> DRS_List() : -{ - DRS drs; - List<DRS> drs_list = null; -} -{ - drs = DRS() ("," drs_list = DRS_List())? - { - if (drs_list == null) - { - drs_list = new ArrayList<DRS>(); - } - - drs_list.add(0,drs); - return drs_list; - } -} - -List<Argument> Arg_List() : -{ - Argument argument; - List<Argument> arg_list = null; -} -{ - argument = Argument() ("," arg_list = Arg_List())? - { - if (arg_list == null) - { - arg_list = new ArrayList<Argument>(); - } - - arg_list.add(0,argument); - return arg_list; - } - -} - -Argument Argument() : -{ - Token label; - Token word; - Token referent; - Type type; -} -{ - "(" label=<LABEL> "," referent = dr() "," word=<WORD> "," type = Type() ")" - { - Argument argument = new Argument(); - argument.setLabel(new Label(label.toString())); - argument.setReferent(referent.toString()); - argument.setAnchor(word.toString()); - argument.setType(type); - return argument; - } -} - -Type Type() : -{ - Type argument; - Type result; - Token word; -} -{ - "<" argument = Type() "," result = Type() ">" - { - CompositeType type = new CompositeType(); - type.setArgumentType(argument); - type.setResultType(result); - return type; - } - - | - - word = <WORD> - { - ElementaryType type=null; - if (word.toString().equals("e")) - type = new ElementaryType(ElemType.e); - if (word.toString().equals("t")) - type = new ElementaryType(ElemType.t); - return type; - } -} - +List<DominanceConstraint> DC_List() : +{ +List<DominanceConstraint> dc_list = null; 
+DominanceConstraint dc = null; +} +{ + dc = DominanceConstraint() ("," dc_list = DC_List())? + { + if (dc_list == null) + { + dc_list = new ArrayList<DominanceConstraint>(); + } + + dc_list.add(0,dc); + return dc_list; + } + + +} + + +List<DRS> DRS_List() : +{ + DRS drs; + List<DRS> drs_list = null; +} +{ + drs = DRS() ("," drs_list = DRS_List())? + { + if (drs_list == null) + { + drs_list = new ArrayList<DRS>(); + } + + drs_list.add(0,drs); + return drs_list; + } +} + +List<Argument> Arg_List() : +{ + Argument argument; + List<Argument> arg_list = null; +} +{ + argument = Argument() ("," arg_list = Arg_List())? + { + if (arg_list == null) + { + arg_list = new ArrayList<Argument>(); + } + + arg_list.add(0,argument); + return arg_list; + } + +} + +Argument Argument() : +{ + Token label; + Token word; + Token referent; + Type type; +} +{ + "(" label=<LABEL> "," referent = dr() "," word=<WORD> "," type = Type() ")" + { + Argument argument = new Argument(); + argument.setLabel(new Label(label.toString())); + argument.setReferent(referent.toString()); + argument.setAnchor(word.toString()); + argument.setType(type); + return argument; + } +} + +Type Type() : +{ + Type argument; + Type result; + Token word; +} +{ + "<" argument = Type() "," result = Type() ">" + { + CompositeType type = new CompositeType(); + type.setArgumentType(argument); + type.setResultType(result); + return type; + } + + | + + word = <WORD> + { + ElementaryType type=null; + if (word.toString().equals("e")) + type = new ElementaryType(ElemType.e); + if (word.toString().equals("t")) + type = new ElementaryType(ElemType.t); + return type; + } +} + + /** DRS */ DRS DRS() : -{ - Set<DiscourseReferent> dr_set = null; - Set<DRS_Condition> conditions = null; - DRS drs; - Token label; - +{ + Set<DiscourseReferent> dr_set = null; + Set<DRS_Condition> conditions = null; + DRS drs; + Token label; + } -{ - label=<LABEL> ":[" (dr_set=DR_Set())? "|" (conditions=Condition_List())? 
"]" - { - if (dr_set == null) - { - dr_set = new HashSet<DiscourseReferent>(); - } - drs = new DRS(); - drs.setLabel(label.toString()); - drs.setDiscourseReferents(dr_set); - if (conditions != null) - drs.setDRSConditions(conditions); - return drs; +{ + label=<LABEL> ":[" (dr_set=DR_Set())? "|" (conditions=Condition_List())? "]" + { + if (dr_set == null) + { + dr_set = new HashSet<DiscourseReferent>(); + } + drs = new DRS(); + drs.setLabel(label.toString()); + drs.setDiscourseReferents(dr_set); + if (conditions != null) + drs.setDRSConditions(conditions); + return drs; } -} - +} + /** DR_Set*/ Set<DiscourseReferent> DR_Set() : -{ - Token dr; - Set<DiscourseReferent> dr_set=null; +{ + Token dr; + Set<DiscourseReferent> dr_set=null; } -{ - dr = dr() ("," dr_set=DR_Set())? - { - if (dr_set == null) - { - dr_set= new HashSet<DiscourseReferent>(); - } - if (dr.toString().startsWith("?")) - { +{ + dr = dr() ("," dr_set=DR_Set())? + { + if (dr_set == null) + { + dr_set= new HashSet<DiscourseReferent>(); + } + if (dr.toString().startsWith("?")) + { dr_set.add(new DiscourseReferent(dr.toString().substring(1),true,false)); } else if (dr.toString().startsWith("!")) { dr_set.add(new DiscourseReferent(dr.toString().substring(1),false,true)); } - else - { - dr_set.add(new DiscourseReferent(dr.toString(),false,false)); - } - return dr_set; + else + { + dr_set.add(new DiscourseReferent(dr.toString(),false,false)); + } + return dr_set; } -} - -Set<DRS_Condition> Condition_List() : -{ - DRS_Condition condition= null; - Set<DRS_Condition> conditions = null; -} -{ - condition=Condition() ("," conditions=Condition_List())? 
- { - if (conditions == null) - { - conditions = new HashSet<DRS_Condition>(); - } - conditions.add(condition); - return conditions; - } -} - -DRS_Condition Condition() : -{ - List<DiscourseReferent> dr_list; - Token dr1; +} + +Set<DRS_Condition> Condition_List() : +{ + DRS_Condition condition= null; + Set<DRS_Condition> conditions = null; +} +{ + condition=Condition() ("," conditions=Condition_List())? + { + if (conditions == null) + { + conditions = new HashSet<DRS_Condition>(); + } + conditions.add(condition); + return conditions; + } +} + +DRS_Condition Condition() : +{ + List<DiscourseReferent> dr_list; + Token dr1; Token dr2; - Token dr; - Token predicate; - Token quantifier; - DRS drs1; - DRS drs2; -} + Token dr; + Token predicate; + Token quantifier; + DRS drs1; + DRS drs2; +} { - predicate=<WORD> "(" dr_list=DR_List() ")" - { - Simple_DRS_Condition condition; - - condition = new Simple_DRS_Condition(); - condition.setPredicate(predicate.toString()); - condition.setArguments(dr_list); - return condition; - } - + predicate=<WORD> "(" dr_list=DR_List() ")" + { + Simple_DRS_Condition condition; + + condition = new Simple_DRS_Condition(); + condition.setPredicate(predicate.toString()); + condition.setArguments(dr_list); + return condition; + } + | dr1 = dr() "=" dr2 = dr() @@ -352,54 +352,54 @@ condition.addArgument(new DiscourseReferent(dr2.toString())); return condition; } - - | - - "NOT" drs1=DRS() - { - Negated_DRS drs = new Negated_DRS(); - drs.setDRS(drs1); - return drs; - } - - | - + + | + + "NOT" drs1=DRS() + { + Negated_DRS drs = new Negated_DRS(); + drs.setDRS(drs1); + return drs; + } + + | + drs1=DRS() (quantifier=<EVERY> | quantifier=<SOME> | quantifier=<AFEW> | quantifier=<MOST> | quantifier=<THEMOST> | quantifier=<THELEAST> | - quantifier=<HOWMANY> | quantifier=<MANY> | quantifier=<NO>) dr=dr() drs2=DRS() - { - Complex_DRS_Condition drs; - drs = new Complex_DRS_Condition(); - drs.setRestrictor(drs1); - drs.setScope(drs2); + quantifier=<HOWMANY> | 
quantifier=<MANY> | quantifier=<NO>) dr=dr() drs2=DRS() + { + Complex_DRS_Condition drs; + drs = new Complex_DRS_Condition(); + drs.setRestrictor(drs1); + drs.setScope(drs2); drs.setReferent(new DiscourseReferent(dr.toString())); - + if (quantifier.toString().equals("EVERY")) {drs.setQuantifier(DRS_Quantifier.EVERY);} - if (quantifier.toString().equals("SOME")) {drs.setQuantifier(DRS_Quantifier.SOME);} + if (quantifier.toString().equals("SOME")) {drs.setQuantifier(DRS_Quantifier.SOME);} if (quantifier.toString().equals("MOST")) {drs.setQuantifier(DRS_Quantifier.MOST);} if (quantifier.toString().equals("THEMOST")) {drs.setQuantifier(DRS_Quantifier.THEMOST);} - if (quantifier.toString().equals("THELEAST")) {drs.setQuantifier(DRS_Quantifier.THELEAST);} - if (quantifier.toString().equals("AFEW")) {drs.setQuantifier(DRS_Quantifier.FEW);} + if (quantifier.toString().equals("THELEAST")) {drs.setQuantifier(DRS_Quantifier.THELEAST);} + if (quantifier.toString().equals("AFEW")) {drs.setQuantifier(DRS_Quantifier.FEW);} if (quantifier.toString().equals("MANY")) {drs.setQuantifier(DRS_Quantifier.MANY);} if (quantifier.toString().equals("HOWMANY")) {drs.setQuantifier(DRS_Quantifier.HOWMANY);} - if (quantifier.toString().equals("NO")) {drs.setQuantifier(DRS_Quantifier.NO);} - - return drs; - - } -} - + if (quantifier.toString().equals("NO")) {drs.setQuantifier(DRS_Quantifier.NO);} + + return drs; + + } +} + /** DR_List*/ List<DiscourseReferent> DR_List() : -{ - Token dr; - List<DiscourseReferent> dr_list=null; +{ + Token dr; + List<DiscourseReferent> dr_list=null; } -{ - dr = dr() ("," dr_list=DR_List())? - { - if (dr_list == null) - { - dr_list= new ArrayList<DiscourseReferent>(); +{ + dr = dr() ("," dr_list=DR_List())? 
+ { + if (dr_list == null) + { + dr_list= new ArrayList<DiscourseReferent>(); } if (dr.toString().startsWith("?")) { @@ -410,9 +410,9 @@ } else { dr_list.add(0,new DiscourseReferent(dr.toString(),false,false)); - } + } - return dr_list; + return dr_list; } } @@ -445,15 +445,15 @@ { ref = <WORD> "/" type = <WORD> "/" (words = Word_List())? { - if (words == null) - { + if (words == null) + { words = new ArrayList<String>(); } if (type.toString().equals("CLASS")) { slottype = SlotType.CLASS; } - else if (type.toString().equals("RESOURCE")) { slottype = SlotType.RESOURCE; } + else if (type.toString().equals("RESOURCE")) { slottype = SlotType.RESOURCE; } else if (type.toString().equals("PROPERTY")) { slottype = SlotType.PROPERTY; } else if (type.toString().equals("SYMPROPERTY")) { slottype = SlotType.SYMPROPERTY; } - else { slottype = SlotType.UNSPEC; } + else { slottype = SlotType.UNSPEC; } return new Slot(ref.toString(),slottype,words); } @@ -476,8 +476,8 @@ return words; } } - + TOKEN: {<EVERY: "EVERY">} TOKEN: {<MOST: "MOST">} @@ -498,7 +498,7 @@ TOKEN: {<LABEL: "l"(["0"-"9"])+>} -TOKEN: {<WORD: (["a"-"z","A"-"Z","_",".","#",":"])+>} +TOKEN: {<WORD: (["a"-"z","A"-"Z","_",".","#",":","0"-"9"])+>} TOKEN: {<DR: (["?","!"])?(["a"-"z","A"-"Z","0"-"9","."])+>} Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java 2012-05-06 21:35:52 UTC (rev 3693) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicSlotBuilder.java 2012-05-07 11:56:11 UTC (rev 3694) @@ -11,7 +11,7 @@ private String[] noun = {"NN","NNS","NNP","NNPS","NPREP","JJNN","JJNPREP"}; private String[] adjective = {"JJ","JJR","JJS","JJH"}; - private String[] verb = 
{"VB","VBD","VBG","VBN","VBP","VBZ","PASSIVE","PASSPART","VPASS","VPASSIN","GERUNDIN","VPREP","WHEN","WHERE"}; + private String[] verb = {"VB","VBD","VBG","VBN","VBP","VBZ","PASSIVE","PASSPART","VPASS","VPASSIN","GERUNDIN","VPREP","WHEN","WHENPREP","WHERE"}; private String[] preps = {"IN","TO"}; public BasicSlotBuilder() { @@ -206,11 +206,16 @@ result.add(vEntry); } else if (pos.equals("VB")) { - String[] whEntry = {token, + String[] whEntry1 = {token, "(S DP[obj] (VP DP[subj] V:'" + token + "'))", "<x,l1,t,[ l1:[|], l4:[ p | SLOT_" + token + "(x,y) ] ],[(l2,x,subj,<<e,t>,t>),(l3,y,obj,<<e,t>,t>)],[ l2<l1,l3<l1,l4<scope(l2),l4<scope(l3) ],[" + slot + "]>" + " ;; <x,l1,t,[ l1:[|], l4:[ | empty(x,y) ] ],[(l2,x,subj,<<e,t>,t>),(l3,y,obj,<<e,t>,t>)],[ l2<l1,l3<l1,l4<scope(l2),l4<scope(l3) ],[]>"}; - result.add(whEntry); + String[] whEntry2 = {token, + "(S DP[subj] (VP V:'" + token + "' DP[obj] ))", + "<x,l1,t,[ l1:[|], l4:[ p | SLOT_" + token + "(x,y) ] ],[(l2,x,subj,<<e,t>,t>),(l3,y,obj,<<e,t>,t>)],[ l2<l1,l3<l1,l4<scope(l2),l4<scope(l3) ],[" + slot + "]>" + + " ;; <x,l1,t,[ l1:[|], l4:[ | empty(x,y) ] ],[(l2,x,subj,<<e,t>,t>),(l3,y,obj,<<e,t>,t>)],[ l2<l1,l3<l1,l4<scope(l2),l4<scope(l3) ],[]>"}; + result.add(whEntry1); + result.add(whEntry2); } else if (pos.equals("VBG") || pos.equals("VBN")) { String[] gerEntry = {token, @@ -225,11 +230,27 @@ } else if (pos.equals("WHEN")) { slot = "SLOT_" + token + "/PROPERTY/" + token + "_date"; - String[] whenEntry = {token, + String[] whenEntry1 = {token, "(S DP[subj] (VP V:'" + token + "'))", "<x,l1,t,[ l1:[ ?y,p | SLOT_" + token + "(x,y) ] ],[(l2,x,subj,<<e,t>,t>)],[ l2=l1 ],[ " + slot + " ]>"}; - result.add(whenEntry); + String[] whenEntry2 = {token, + "(S DP[subj] (VP V:'" + token + "' DP[obj]))", + "<x,l1,t,[ l1:[ ?y,p | SLOT_" + token + "(x,z,y) ] ],[(l2,x,subj,<<e,t>,t>),(l3,z,obj,<<e,t>,t>)],[ l2=l1,l3=l1 ],[ " + slot + " ]>"}; + result.add(whenEntry1); + result.add(whenEntry2); } + else if (pos.equals("WHENPREP")) { + 
System.out.println(" >>>> " + token); // DEBUG + slot = "SLOT_" + token + "/PROPERTY/" + token + "_date"; + String[] whenprepEntry1 = {token, + "(S DP[subj] (VP V:'" + token + "' DP[pobj]))", + "<x,l1,t,[ l1:[ ?y,p | SLOT_" + token + "(x,z,y) ] ],[(l2,x,subj,<<e,t>,t>),(l3,z,pobj,<<e,t>,t>)],[ l2=l1,l3=l1 ],[ " + slot + " ]>"}; + String[] whenprepEntry2 = {token, + "(S DP[subj] (VP V:'" + token + "' NP[pobj]))", + "<x,l1,t,[ l1:[ ?y,p,z | SLOT_" + token + "(x,z,y) ] ],[(l2,x,subj,<<e,t>,t>),(l3,z,pobj,<e,t>)],[ l2=l1,l3=l1 ],[ " + slot + " ]>"}; + result.add(whenprepEntry1); + result.add(whenprepEntry2); + } else if (pos.equals("WHERE")) { slot = "SLOT_" + token + "/PROPERTY/" + token + "_place"; String[] whereEntry = {token, @@ -245,10 +266,18 @@ slot = "SLOT_" + token + "/PROPERTY/" + token; /* ADJECTIVE */ if (pos.equals("JJ")) { - String[] adjEntry = {token, + String[] adjEntry1 = {token, "(NP ADJ:'" + token.toLowerCase() + "' NP*)", - "<x,l1,<e,t>,[ l1:[ j | SLOT_" + token + "(x,j) ] ],[],[],["+slot+"]>"}; - result.add(adjEntry); + "<x,l1,<e,t>,[ l1:[ j | SLOT_" + token + "(x,j) ] ],[],[],["+slot+"]>"}; +// String[] adjEntry2 = {"is .+ " + token, +// "(S DP[subject] (VP V:'is' ADJ:'" + token.toLowerCase() + "'))", +// "<x,l1,<e,t>,[ l1:[ | SLOT_" + token + "(x) ] ],[(l2,x,subject,<<e,t>,t>)],[l2=l1],["+slot+"]>"}; +// String[] adjEntry3 = {"is .+ " + token, +// "(S (VP V:'is' DP[subject] ADJ:'" + token.toLowerCase() + "'))", +// "<x,l1,<e,t>,[ l1:[ | SLOT_" + token + "(x) ] ],[(l2,x,subject,<<e,t>,t>)],[l2=l1],["+slot+"]>"}; + result.add(adjEntry1); +// result.add(adjEntry2); +// result.add(adjEntry3); } if (pos.equals("JJH")) { String[] howEntry = {"how "+token, Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java 2012-05-06 21:35:52 UTC (rev 
3693) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/templator/BasicTemplator.java 2012-05-07 11:56:11 UTC (rev 3694) @@ -45,7 +45,7 @@ } g = LTAG_Constructor.construct(grammarFiles); - + tagger = new StanfordPartOfSpeechTagger(); // tagger = new ApachePartOfSpeechTagger(); Modified: trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex =================================================================== --- trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex 2012-05-06 21:35:52 UTC (rev 3693) +++ trunk/components-ext/src/main/resources/tbsl/lexicon/basic_english.lex 2012-05-07 11:56:11 UTC (rev 3694) @@ -18,8 +18,12 @@ is there || (S V:'is' C:'there' DP[dp]) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> are there || (S V:'are' C:'there' DP[dp]) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> + has there been || (S V:'has' C:'there' V:'been' DP[dp]) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> + have there been || (S V:'have' C:'there' V:'been' DP[dp]) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> is there || (S DP[dp] (VP V:'is' C:'there')) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> are there || (S DP[dp] (VP V:'are' C:'there')) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> + have there been || (S DP[dp] (VP V:'have' C:'there' V:'been')) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> + has there been || (S DP[dp] (VP V:'has' C:'there' V:'been')) || <x, l1, t, [ l1:[ | ] ], [ (l2,x,dp,<<e,t>,t>) ], [ l2=l1 ],[]> // TO BE: YES/NO QUESTIONS @@ -75,8 +79,9 @@ least || (ADJ DET:'least' ADJ*) || <x,l1,<e,t>,[ l1:[ | minimum(a,x,x) ] ], [],[],[]> - how many || (DET DET:'how' DET:'many') || <x,l1,e, [ l1:[ ?x | ] ], [],[],[]> - how many || (DET DET:'how' DET:'many') || <x,l1,e, [ l1:[ | count(x) ] ], [],[],[]> + how many || (DET DET:'how' DET:'many') || <x,l1,e, [ l1:[ ?x 
| ] ], [],[],[]> + how many || (DET DET:'how' DET:'many') || <x,l1,e, [ l1:[ | count(x) ] ], [],[],[]> + how often || (DP DET:'how' DET:'often') || <x,l1,<<e,t>,t>, [ l1:[ | count(x) ] ], [],[],[]> a || (DET DET:'a') || <x,l1,e, [ l1:[ x |] ], [],[],[]> an || (DET DET:'an') || <x,l1,e, [ l1:[ x |] ], [],[],[]> which || (DET DET:'which') || <x,l1,e, [ l1:[ ?x |] ], [],[],[]> @@ -102,14 +107,18 @@ also || (DP ADV:'also' DP*) || <x,l1,<<e,t>,t>,[ l1:[|] ],[],[],[]> has || (S DP[subject] (VP V:'has' DP[object])) || <x, l1, t, [ l1:[ | ], l2:[ | empty(x,y) ] ], [ (l3,x,subject,<<e,t>,t>), (l4,y,object,<<e,t>,t>) ], [ l3<l1, l4<l1, l2<scope(l3), l2<scope(l4) ],[]> - have || (S DP[subject] (VP V:'have' DP[object])) || <x, l1, t, [ l1:[ | ], l2:[ | empty(x,y) ] ], [ (l3,x,subject,<<e,t>,t>), (l4,y,object,<<e,t>,t>) ], [ l3<l1, l4<l1, l2<scope(l3), l2<scope(l4) ],[]> - had || (S DP[subject] (VP V:'had' DP[object])) || <x, l1, t, [ l1:[ | ], l2:[ | empty(x,y) ] ], [ (l3,x,subject,<<e,t>,t>), (l4,y,object,<<e,t>,t>) ], [ l3<l1, l4<l1, l2<scope(l3), l2<scope(l4) ],[]> + have || (S DP[subject] (VP V:'have' DP[object])) || <x, l1, t, [ l1:[ | ], l2:[ | empty(x,y) ] ], [ (l3,x,subject,<<e,t>,t>), (l4,y,object,<<e,t>,t>) ], [ l3<l1, l4<l1, l2<scope(l3), l2<scope(l4) ],[]> + have || (S DP[object] (VP DP[subject] V:'have')) || <x, l1, t, [ l1:[ | ], l2:[ | empty(x,y) ] ], [ (l3,x,subject,<<e,t>,t>), (l4,y,object,<<e,t>,t>) ], [ l3<l1, l4<l1, l2<scope(l3), l2<scope(l4) ],[]> + had || (S DP[subject] (VP V:'had' DP[object])) || <x, l1, t, [ l1:[ | ], l2:[ | empty(x,y) ] ], [ (l3,x,subject,<<e,t>,t>), (l4,y,object,<<e,t>,t>) ], [ l3<l1, l4<l1, l2<scope(l3), l2<scope(l4) ],[]> // with || (NP NP* (PP P:'with' DP[dp])) || <x,l1,<e,t>,[ l1:[| empty(x,y) ] ],[(l2,y,dp,<<e,t>,t>)],[l2=l1],[]> - +// of || (NP NP* (PP P:'of' DP[dp])) || <x,l1,<e,t>,[ l1:[| empty(x,y) ] ],[(l2,y,dp,<<e,t>,t>)],[l2=l1],[]> + people || (NP N:'people') || <x,l1,<e,t>,[ l1:[|] ],[],[],[]> + still || (ADJ ADJ:'still' 
ADJ*) || <x,l1,<e,t>,[l1:[|]],[],[],[]> + // WH WORDS // -------- @@ -172,7 +181,7 @@ eight || (NP NUM:'eight' NP*) || <x,l1,<e,t>,[l1:[x|count(x,8)]],[],[],[]> nine || (NP NUM:'nine' NP*) || <x,l1,<e,t>,[l1:[x|count(x,9)]],[],[],[]> ten || (NP NUM:'ten' NP*) || <x,l1,<e,t>,[l1:[x|count(x,10)]],[],[],[]> - + one || (NUM NUM:'one') || <x,l1,e,[l1:[x|equal(x,1)]],[],[],[]> two || (NUM NUM:'two') || <x,l1,e,[l1:[x|equal(x,2)]],[],[],[]> three || (NUM NUM:'three') || <x,l1,e,[l1:[x|equal(x,3)]],[],[],[]> Modified: trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/GoldTagger.java =================================================================== --- trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/GoldTagger.java 2012-05-06 21:35:52 UTC (rev 3693) +++ trunk/components-ext/src/test/java/org/dllearner/algorithm/tbsl/GoldTagger.java 2012-05-07 11:56:11 UTC (rev 3694) @@ -25,8 +25,8 @@ public class GoldTagger { - static String GOLD = "src/main/resources/tbsl/evaluation/qald2-dbpedia-train.xml"; - static String OUT = "target/qald2-dbpedia-train-tagged.xml"; + static String GOLD = "/home/christina/Downloads/dbpedia-test-new.xml"; + static String OUT = "/home/christina/Downloads/dbpedia-test-new-tagged.xml"; public static void main(String[] args) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <seb...@us...> - 2012-05-06 21:36:01
|
Revision: 3693 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3693&view=rev Author: sebastianwtr Date: 2012-05-06 21:35:52 +0000 (Sun, 06 May 2012) Log Message: ----------- [tbsl exploration] implementing Step5 Modified Paths: -------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/TemplateBuilder.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Utils/ServerUtil.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/MainInterface.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/exploration_main.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/exploration_main/testClass_new.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/IterationModule.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/LevenstheinModule.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/SemanticRelatenes.java trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/modules/WordnetModule.java Removed Paths: ------------- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java 2012-05-04 01:00:54 UTC (rev 3692) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/Index_utils.java 2012-05-06 21:35:52 UTC (rev 3693) @@ 
-1,5 +1,6 @@ package org.dllearner.algorithm.tbsl.exploration.Index; +import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; @@ -31,7 +32,12 @@ if(fall==0 || fall==3){ - result=myindex.getResourceURI(string.toLowerCase()); + try { + result=myindex.getResourceURI(string.toLowerCase()); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } result_List.add(result); } @@ -46,7 +52,12 @@ if(fall==1){ - tmp1=myindex.getPropertyURI(string.toLowerCase()); + try { + tmp1=myindex.getPropertyURI(string.toLowerCase()); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } tmp2=myindex.getontologyURI(string.toLowerCase()); if(tmp1!=null) result_List.add(tmp1); if(tmp2!=null) result_List.add(tmp2); @@ -65,7 +76,12 @@ string=string.replace(".", " "); String result=null; ArrayList<String> result_List = new ArrayList<String>(); - result=myindex.getResourceURI(string.toLowerCase()); + try { + result=myindex.getResourceURI(string.toLowerCase()); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } if(result!=null){ result_List.add(result); } @@ -107,7 +123,12 @@ if(string.substring(string.length()-1).contains("s")){ String neuer_string = string.substring(0, string.length() -1); - result=myindex.getPropertyURI(neuer_string.toLowerCase()); + try { + result=myindex.getPropertyURI(neuer_string.toLowerCase()); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } result2=myindex.getontologyURI(neuer_string.toLowerCase()); //tmp2=myindex.getYagoURI(neuer_string.toLowerCase()); if(result2!=null){ @@ -127,7 +148,12 @@ } } else{ - result=myindex.getPropertyURI(string.toLowerCase()); + try { + result=myindex.getPropertyURI(string.toLowerCase()); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } 
result2=myindex.getontologyURI(string.toLowerCase()); if(Setting.isDebugModus())DebugMode.debugPrint("Result: "+result); if(result2!=null){ Modified: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java 2012-05-04 01:00:54 UTC (rev 3692) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Index/SQLiteIndex.java 2012-05-06 21:35:52 UTC (rev 3693) @@ -1,8 +1,12 @@ package org.dllearner.algorithm.tbsl.exploration.Index; import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.sql.Connection; @@ -54,7 +58,7 @@ WHERE City LIKE '%tav%' */ - public String getResourceURI(String string) throws SQLException{ + public String getResourceURI(String string) throws SQLException, IOException{ /* while(rs.next()) {*/ Statement stat = conn.createStatement(); @@ -68,7 +72,9 @@ } catch (Exception e) { // TODO Auto-generated catch block //e.printStackTrace(); + return null; + } } @@ -140,7 +146,7 @@ } - public String getPropertyURI(String string) throws SQLException{ + public String getPropertyURI(String string) throws SQLException, IOException{ Statement stat = conn.createStatement(); ResultSet rs; ArrayList<String> al = new ArrayList<String>(); @@ -175,13 +181,13 @@ boolean found = false; for(String s : al){ if(s.contains("ontology")){ - System.out.println("Return String: "+s); + //System.out.println("Return String: "+s); found=true; return s; } } if(found==false){ - System.out.println("Return String: "+al.get(0)); + //System.out.println("Return String: "+al.get(0)); return al.get(0); } } @@ -190,7 +196,9 @@ } catch 
(Exception e) { // TODO Auto-generated catch block //e.printStackTrace(); - System.err.println("Error in SQLiteIndex.getProperty!!"); + //System.err.println("Error in SQLiteIndex.getProperty!!"); + + return null; } Deleted: trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java =================================================================== --- trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2012-05-04 01:00:54 UTC (rev 3692) +++ trunk/components-ext/src/main/java/org/dllearner/algorithm/tbsl/exploration/Sparql/SparqlObject.java 2012-05-06 21:35:52 UTC (rev 3693) @@ -1,2665 +0,0 @@ -package org.dllearner.algorithm.tbsl.exploration.Sparql; -import java.io.BufferedReader; - -import java.io.BufferedWriter; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLConnection; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.Set; - -import net.didion.jwnl.JWNLException; -import net.didion.jwnl.data.POS; - -import org.dllearner.algorithm.tbsl.exploration.Index.SQLiteIndex; -import org.dllearner.algorithm.tbsl.exploration.Utils.Levenshtein; -import org.dllearner.algorithm.tbsl.exploration.Utils.ServerUtil; -import org.dllearner.algorithm.tbsl.nlp.StanfordLemmatizer; -import org.dllearner.algorithm.tbsl.nlp.WordNet; -import org.dllearner.algorithm.tbsl.sparql.BasicQueryTemplate; -import org.dllearner.algorithm.tbsl.sparql.Path; -import 
org.dllearner.algorithm.tbsl.sparql.SPARQL_Filter; -import org.dllearner.algorithm.tbsl.sparql.SPARQL_Having; -import org.dllearner.algorithm.tbsl.sparql.SPARQL_Term; -import org.dllearner.algorithm.tbsl.sparql.Slot; -import org.dllearner.algorithm.tbsl.sparql.Template; -import org.dllearner.algorithm.tbsl.templator.BasicTemplator; -import org.dllearner.algorithm.tbsl.templator.Templator; -import org.xml.sax.InputSource; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; - - - - -public class SparqlObject { - //global Variable dict - - //start counting with 0 - static int explorationdepthwordnet=2; - static int iterationdepth =0; - static int numberofanswers=1; - static double LevenstheinMin = 0.8; - static WordNet wordnet; - BasicTemplator btemplator; - Templator templator; - private static SQLiteIndex myindex; - boolean only_best_levensthein_query; - static StanfordLemmatizer lemmatiser; - //one Minute - private static int timeToTimeoutOnServer=60000; - - //change here and in getRessourcePropertys - //String Prefix="http://greententacle.techfak.uni-bielefeld.de:5171/sparql"; - String Prefix="http://dbpedia.org/sparql"; - //String Prefix="http://greententacle.techfak.uni-bielefeld.de:5171/sparql"; - //String Prefix="http://purpurtentacle.techfak.uni-bielefeld.de:8890/sparql"; - - - //Konstruktor - public SparqlObject() throws MalformedURLException, ClassNotFoundException, SQLException{ - - wordnet = new WordNet(); - System.out.println("Loading SPARQL Templator"); - // - btemplator = new BasicTemplator(); - //btemplator.UNTAGGED_INPUT = false; - //templator = new Templator(); - System.out.println("Loading SPARQL Templator Done\n"); - System.out.println("Start Indexing"); - myindex = new SQLiteIndex(); - - System.out.println("Done:Indexing"); - - //normaly 1 - setExplorationdepthwordnet(1); - //eigentlich immer mit 0 initialisieren - 
setIterationdepth(9); - setNumberofanswers(1); - - only_best_levensthein_query=false; - lemmatiser = new StanfordLemmatizer(); - } - - /* - * ##################################### - * Getter and Setter Methods - */ - - public int getExplorationdepthwordnet() { - return explorationdepthwordnet; - } - - - public void setExplorationdepthwordnet(int explorationdepthwordnet) { - SparqlObject.explorationdepthwordnet = explorationdepthwordnet; - } - - - public int getIterationdepth() { - return iterationdepth; - } - - - public void setIterationdepth(int iterationdepth) { - SparqlObject.iterationdepth = iterationdepth; - } - - - public int getNumberofanswers() { - return numberofanswers; - } - - - public void setNumberofanswers(int numberofanswers) { - SparqlObject.numberofanswers = numberofanswers; - } - - - /* - * ############################## - * - */ - /* - * "Main" Method of this Class. - * - */ - public queryInformation create_Sparql_query(queryInformation queryObject) throws JWNLException, IOException, SQLException{ - //create_Sparql_query_new(string); - - ArrayList<ArrayList<String>> lstquery = new ArrayList<ArrayList<String>>(); - long startParsingTime = System.currentTimeMillis(); - //lstquery=getQuery(queryObject.getQuery(),queryObject); - queryObject=getQuery(queryObject); - lstquery=queryObject.getQueryInformation(); - queryObject.setQueryInformation(lstquery); - /*BufferedReader in1 = new BufferedReader(new InputStreamReader(System.in)); - String line; - - line = in1.readLine();*/ - long endParsingTime = System.currentTimeMillis(); - long startIterationTime = System.currentTimeMillis(); - System.out.println("The Questionparsing took "+ (endParsingTime-startParsingTime)+ " ms"); - ArrayList<String> final_answer = new ArrayList<String>(); - Set<String> final_query_hash = new HashSet<String>(); - - if(lstquery.isEmpty()){ - saveNotParsedQuestions(queryObject.getQuery()); - } - - for(ArrayList<String> querylist : lstquery){ - - boolean startIterating=true; - 
String query=""; - if(querylist.get(0).contains("ERROR"))startIterating=false; - else query=querylist.get(0).toString(); - - //TODO: Somewhere is an error, because sometimes there is an double _ a __ and thats not allowed. - //fixing it now with an replace of "__" to "" - query=query.replace("__", ""); - - if(getIterationdepth()==-1&&startIterating==true){ - String tmp = new String(); - String s = null; - BufferedReader in = null; - - // Liest Textzeilen aus der Datei in einen Vector: - try { - in = new BufferedReader( - new InputStreamReader( - new FileInputStream( "/tmp/testresult.txt" ) ) ); - while( null != (s = in.readLine()) ) { - tmp=tmp.concat("\n".concat(s)); - } - } catch( FileNotFoundException ex ) { - } catch( Exception ex ) { - System.out.println( ex ); - } finally { - if( in != null ) - try { - in.close(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - String out=null; - if (query.equals("") || query.equals(" ")||query.length()==0) query="Could not parse"; - out=tmp + "\n" + queryObject.getQuery() + ":\n"+query+"\n"; - - BufferedWriter outfile = new BufferedWriter( - new OutputStreamWriter( - new FileOutputStream( "/tmp/testresult.txt" ) ) ); - - outfile.write(out); - outfile.close(); - - } - - /* - * ################################################################################################# - */ - //Iteration 0 - if(getIterationdepth()==0&&startIterating==true||getIterationdepth()==9&&startIterating==true){ - String tmp = new String(); - String s = null; - BufferedReader in = null; - - // Lies Textzeilen aus der Datei in einen Vector: - try { - in = new BufferedReader( - new InputStreamReader( - new FileInputStream( "/tmp/answer.txt" ) ) ); - while( null != (s = in.readLine()) ) { - tmp+="\n"+s; - } - } catch( FileNotFoundException ex ) { - } catch( Exception ex ) { - System.out.println( ex ); - } finally { - if( in != null ) - try { - in.close(); - } catch (IOException e) { - // TODO 
Auto-generated catch block - e.printStackTrace(); - } - } - - final_query_hash.add(query); - - } - /* - * ################################################################################################# - */ - //Iterration 1 - /* - * Only Levensthein!!! - */ - if(getIterationdepth()==1&&startIterating==true||getIterationdepth()==9&&startIterating==true){ - - ArrayList<String> final_answer_tmp = new ArrayList<String>(); - ArrayList<String> final_query_tmp=new ArrayList<String>(); - if(querylist.size()==4&&!query.contains("rdf:type")){ - - final_query_tmp=simpleCase(querylist, query, "LEVENSTHEIN",queryObject); - for(String i: final_query_tmp){ - final_query_hash.add(i); - - } - } - //e.g. Select ßy Where (?y rdf:type <http://..../ontology/School> - if(querylist.size()==4&&query.contains("rdf:type")){ - final_query_hash.add(query); - } - - - if(querylist.size()>4&&query.contains("rdf:type")){ - - final_query_tmp=isAIteration(querylist, query,"LEVENSTHEIN",queryObject.getIsaResource()); - for(String i: final_query_tmp){ - - final_query_hash.add(i); - } - } - - if(querylist.size()>4&&!query.contains("rdf:type")){ - final_query_tmp=advancedCase(querylist, query,"LEVENSTHEIN"); - for(String i: final_query_tmp){ - final_query_hash.add(i); - } - } - - - - - - } - /* - * ################################################################################################# - */ - //Iterration 2 - /* - * Only Wordnet!!! - */ - if(getIterationdepth()==2&&startIterating==true||getIterationdepth()==9&&startIterating==true){ - ArrayList<String> final_query_tmp = new ArrayList<String>(); - //isAIteration(querylist, query); - - if(querylist.size()==4&&!query.contains("rdf:type")){ - - final_query_tmp=simpleCase(querylist, query, "WORDNET",queryObject); - for(String i: final_query_tmp){ - final_query_hash.add(i); - } - } - //e.g. 
Select ßy Where (?y rdf:type <http://..../ontology/School> - if(querylist.size()==4&&query.contains("rdf:type")){ - final_query_hash.add(query); - } - - if(querylist.size()>4&&query.contains("rdf:type")){ - - final_query_tmp=isAIteration(querylist, query,"WORDNET",queryObject.getIsaResource()); - for(String i: final_query_tmp){ - final_query_hash.add(i); - } - } - - if(querylist.size()>4&&!query.contains("rdf:type")){ - final_query_tmp=advancedCase(querylist, query,"WORDNET"); - for(String i: final_query_tmp){ - final_query_hash.add(i); - } - } - - - - - } - - - } - - - /* - * Send Query to Server and get answers - */ - - - - Iterator<String> it = final_query_hash.iterator(); - while (it.hasNext()) { - System.out.println(it.next()); - ArrayList<String> answer= new ArrayList<String>(); - try{ - String anfrage=it.next().toString(); - answer=sendServerQuestionRequestArray(anfrage); - // @en is also in the ML - /* - answer_tmp=answer_tmp.replace("\"@en", ""); - answer_tmp=answer_tmp.replace("\"", "");*/ - - //filter answers! 
- for(String answer_tmp : answer ){ - if(answer_tmp!="EmtyAnswer"){ - if(queryObject.isHint()){ - //System.out.println("Using hint!"); - /* - * Answertyps: resource, string, boolean, num, date - */ - if(queryObject.getType().contains("boolean")){ - if(answer_tmp.contains("true")||answer_tmp.contains("false")) final_answer.add(answer_tmp); - - } - else if (queryObject.getType().contains("resource")){ - try{ - String[] tmparray = answer_tmp.split("\n"); - for(String z : tmparray)final_answer.add(z); - } - catch(Exception e){ - final_answer.add(answer_tmp); - } - } - else if (queryObject.getType().contains("string")||queryObject.getType().contains("uri")){ - if(!answer_tmp.contains("EmtyAnswer")) { - String[] tmparray = answer_tmp.split("\n"); - for(String z : tmparray)final_answer.add(z); - } - - } - else if (queryObject.getType().contains("num")){ - if(answer_tmp.matches("[0-9]*")) final_answer.add(answer_tmp); - - } - else if (queryObject.getType().contains("date")){ - final_answer.add(answer_tmp); - } - } - else{ - //final_answer.add("Begin:\n"+anfrage +"\n"+answer_tmp+" \n End"); - final_answer.add(answer_tmp); - } - } - } - } - catch (Exception e){ - - } - } - - - long stopIterationTime = System.currentTimeMillis(); - /* - * Set time - */ - - queryObject.setTimeGesamt(stopIterationTime-startParsingTime); - queryObject.setTimeParser(endParsingTime-startParsingTime); - queryObject.setTimeWithoutParser(stopIterationTime-startIterationTime); - queryObject.setResult(final_answer); - - return queryObject; - } - - private ArrayList<String> newIteration(ArrayList<String> querylist, String query, queryInformation queryObject) throws SQLException, - JWNLException { - //only for special case, that the first condition has a resource - ArrayList<String> final_answer=new ArrayList<String>(); - String firstResource=""; - String firstProperty=""; - String secondProperty=null; - String sideOfProperty=null; - String sideOfPropertyTwo=null; - int tmpcounter=0; - for(String s : 
querylist){ - //we dont need the first one, because thats the query itself - tmpcounter=tmpcounter+1; - if(tmpcounter>=1&&tmpcounter<=4){ - if(s.contains("LEFT")){ - sideOfProperty="LEFT"; - firstResource=s.replace("LEFT",""); - } - if(s.contains("RIGHT")){ - sideOfProperty="RIGHT"; - firstResource=s.replace("RIGHT",""); - } - if(s.contains("PROPERTY")){ - firstProperty=s.replace("PROPERTY",""); - } - - } - if(tmpcounter>4){ - if(s.contains("LEFT")){ - sideOfPropertyTwo="LEFT"; - } - if(s.contains("RIGHT")){ - sideOfPropertyTwo="RIGHT"; - } - if(s.contains("PROPERTY")){ - secondProperty=s.replace("PROPERTY",""); - } - - } - - } - //first create Query and get the URI's - String firstquery=""; - if(sideOfProperty=="RIGHT"){ - firstquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(firstResource.toLowerCase(),0)+"> <"+getUriFromIndex(firstProperty.toLowerCase(),1) +"> ?y}"; - } - if(sideOfProperty=="RIGHT"){ - firstquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(firstProperty.toLowerCase(),1)+"> <"+getUriFromIndex(firstResource.toLowerCase(),0) +"> ?y}"; - - } - - //first try without iterating over wordnet and levensthein - ArrayList<String> answer_tmp=new ArrayList<String>(); - answer_tmp=sendServerQuestionRequestArray(firstquery); - - //if answer_tmp is emty try to iterate in this case with wordnet - ArrayList<String>querylist_new=new ArrayList<String>(); - querylist_new.add(firstquery); - querylist_new.add("PROPERTY"+firstProperty); - querylist_new.add(sideOfProperty+firstResource); - if(answer_tmp.isEmpty()){ - answer_tmp=simpleCase(querylist_new,firstquery,"WORDNET",queryObject); - } - //if answer_tmp is still empty return null and exit function - if(answer_tmp.isEmpty()){final_answer.add("new Iteration didnt work"); - - return final_answer; - } - - ArrayList<ArrayList<String>>secondquerylist=new ArrayList<ArrayList<String>>(); - - //we have now the uri's for 
the second query and the result answers - //create now for every entry, if it contains something like http an new query - for(String s : answer_tmp){ - System.out.println("!!!!!!!!!!!!!"); - System.out.println("URI found: "+ s); - System.out.println("!!!!!!!!!!!!!"); - String secondquery =""; - ArrayList<String> tmp = new ArrayList<String>(); - if(s.contains("http:")){ - if(sideOfPropertyTwo=="RIGHT"){ - secondquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(s.toLowerCase(),0)+"> <"+getUriFromIndex(secondProperty.toLowerCase(),1) +"> ?y}"; - tmp.add(secondquery); - tmp.add("PROPERTY"+secondProperty); - querylist_new.add(sideOfPropertyTwo+s); - secondquerylist.add(tmp); - } - if(sideOfPropertyTwo=="RIGHT"){ - secondquery="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?y WHERE {<"+getUriFromIndex(secondProperty.toLowerCase(),1)+"> <"+getUriFromIndex(s.toLowerCase(),0) +"> ?y}"; - tmp.add(secondquery); - tmp.add("PROPERTY"+secondProperty); - querylist_new.add(sideOfPropertyTwo+s); - secondquerylist.add(tmp); - } - - } - } - - - //TODO: Check this part of the function!!! 
- for(ArrayList as: secondquerylist){ - ArrayList<String> answer_tmp_two=new ArrayList<String>(); - //answer_tmp_two=sendServerQuestionRequestArray(s); - answer_tmp=simpleCase(as,as.get(0).toString(),"WORDNET",queryObject); - for(String t :answer_tmp_two){ - final_answer.add(t); - System.out.println("Answer from advanced Iteration: "+ t); - } - } - if(final_answer.isEmpty())final_answer.add("new Iteration didnt work"); - System.out.println("Returning the function"); - return final_answer; - - } - - - - private ArrayList<String> isAIteration(ArrayList<String> querylist, String query, String fall, String uri_isA_Resource) throws SQLException, - JWNLException { - ArrayList<String> new_queries= new ArrayList<String>(); - //TODO: in get Query change, that there will be a second query, but only with the part of the condition upsidedown, which doesnt contains an isA - System.out.println("\n In IsA Iteration \n"); - - /* Erster Schritt, alle x rausbekommen: - e.g: PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?x WHERE {?x rdf:type <http://dbpedia.org/ontology/Country>.} - - */ - - ArrayList<String> list_of_x=new ArrayList<String>(); - String query_for_x=null; - - - - - query_for_x="PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?x WHERE { ?x rdf:type <"+uri_isA_Resource+">}"; - //now send query_for_x to the server, to get all x - System.out.println("IsA x-query: "+query_for_x); - if(query_for_x!=null)list_of_x=sendServerQuestionRequestArray(query_for_x); - - /* - * Zweiter Schritt: - * alle Propertys z.B. 
der ersten 5 xe holen und in eine Liste packen - */ - - int number_of_x_used=5; - int tmpcounter=0; - - HashMap<String,String> list_of_properties=new HashMap<String,String>(); - - for(String s: list_of_x){ - /* - * First use left and also right Propertys - */ - - tmpcounter++; - if(tmpcounter <=number_of_x_used){ - HashMap<String,String> propertiesleft = new HashMap<String, String>(); - HashMap<String,String> propertiesright = new HashMap<String, String>(); - // GetRessourcePropertys property = new GetRessourcePropertys(); - - try { - propertiesleft=ServerUtil.getPropertiesForGivenResource(s,"LEFT"); - propertiesright=ServerUtil.getPropertiesForGivenResource(s,"RIGHT"); - } - catch (Exception e){ - - } - - /* - * now put the properties togehter - */ - list_of_properties.putAll(propertiesright); - list_of_properties.putAll(propertiesleft); - } - - } - /* System.out.println("List of Properties: "); - for (Entry<String, String> entry : list_of_properties.entrySet()) { - String key = entry.getKey(); - key=key.replace("\"",""); - key=key.replace("@en",""); - String value = entry.getValue(); - System.out.println("Key: "+ key + " Value: "+value); - }*/ - - /* - * get Property used in the original query - * - */ - - System.out.println("Original Query: "+query); - - //http://dbpedia.org/ontology/officialLanguage - - //look for property - Pattern p3=Pattern.compile (".*\\<(http://dbpedia.org/property/.*)\\>\\W\\W*\\?.*"); - Matcher m3 = p3.matcher(query); - String property_to_compare_with_uri=""; - while(m3.find()) { - property_to_compare_with_uri=m3.group(1); - System.out.println("Property in IsA: "+m3.group(1)); - } - - //if there is no property but an ontology-property - if(property_to_compare_with_uri==""){ - Pattern p4=Pattern.compile (".*\\<(http://dbpedia.org/ontology/[a-z].*)\\>\\W\\W*\\?.*"); - Matcher m4 = p4.matcher(query); - while(m4.find()) { - property_to_compare_with_uri=m4.group(1); - System.out.println("Property in IsA: "+m4.group(1)); - } - } - - String 
property_to_compare_with=property_to_compare_with_uri.replace("http://dbpedia.org/property/","").replace("http://dbpedia.org/ontology/",""); - - /* BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); - String line; - - System.out.println("############################"); - System.out.println("query: "+query); - System.out.println("property_to_compare_with: "+property_to_compare_with); - System.out.println("property_to_compare_with_uri: "+property_to_compare_with_uri); - System.out.println("############################"); - try { - line = in.readLine(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - }*/ - System.out.println(property_to_compare_with + " : "+property_to_compare_with_uri +" : "+uri_isA_Resource); - if(fall.contains("WORDNET")) new_queries=doWordnet(query,property_to_compare_with,property_to_compare_with_uri,list_of_properties); - if(fall.contains("LEVENSTHEIN")) new_queries=doLevensthein(query,property_to_compare_with_uri,property_to_compare_with_uri,list_of_properties); - - - /* BufferedReader in1 = new BufferedReader(new InputStreamReader(System.in)); - String line; - - try { - line = in1.readLine(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - }*/ - return new_queries; - - } - - - - private ArrayList<String> simpleCase(ArrayList<String> querylist, String query, String fall, queryInformation queryObject) throws SQLException, - JWNLException { - - System.out.println("In Simpe levensthein case!!"); - String resource=""; - String property_to_compare_with=""; - String sideOfProperty="LEFT"; - ArrayList<String> new_queries= new ArrayList<String>(); - - - int tmpcounter=0; - for(String s : querylist){ - //we dont need the first one, because thats the query itself - tmpcounter=tmpcounter+1; - if(tmpcounter>=1){ - if(s.contains("LEFT")){ - sideOfProperty="LEFT"; - resource=s.replace("LEFT",""); - } - if(s.contains("RIGHT")){ - sideOfProperty="RIGHT"; 
- resource=s.replace("RIGHT",""); - } - if(s.contains("PROPERTY")){ - property_to_compare_with=s.replace("PROPERTY",""); - } - - } - - } - System.out.println("ARRAY LIST: "+querylist); - System.out.println("Property to compare:: "+ property_to_compare_with); - System.out.println("Resource: "+ resource); - - - HashMap<String,String> properties = new HashMap<String, String>(); - - Boolean goOnAfterProperty = true; - - //System.out.println("URI from Resource "+ resource +": "+getUriFromIndex(resource.toLowerCase(),0)); - System.out.println("URI from Resource "+ resource +": "+queryObject.getHashValue(resource.toLowerCase())); - HashMap<String, String> bla = queryObject.getHashMap(); - System.out.println("INhalt Hasmap QueryObject:"); - for (String z: bla.keySet()) System.out.println(z); - - //gets Propertys left or right from the resource! - try { - properties=ServerUtil.getPropertiesForGivenResource(queryObject.getHashValue(resource.toLowerCase()),sideOfProperty); - if (properties==null){ - - System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - } - - System.out.println("Properties from Resource "+resource+": "+properties); - - } catch (IOException e) { - - System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - - } - if(goOnAfterProperty==true){ - if(fall.contains("WORDNET")) new_queries=doWordnet(query, property_to_compare_with,queryObject.getHashValue(property_to_compare_with.toLowerCase()),properties); - if(fall.contains("LEVENSTHEIN")) new_queries=doLevensthein(query, property_to_compare_with,queryObject.getHashValue(property_to_compare_with.toLowerCase()),properties); - //new_queries=doLevensthein(query, property_to_compare_with,getUriFromIndex(property_to_compare_with.toLowerCase(),1),properties); - - //add original query - new_queries.add(query); - - } - - - //test to returnqueries, put them together and than send them to the server. 
- return new_queries; - //return final_answer; -} - - - - - private ArrayList<String> doLevensthein(String query, String property_to_compare_with,String uri_of_property, HashMap<String, String> properties) - throws SQLException { - ArrayList<String> new_queries= new ArrayList<String>(); - String bestQuery=""; - double highestNLD=0; - //iterate over properties - for (Entry<String, String> entry : properties.entrySet()) { - String key = entry.getKey(); - key=key.replace("\"",""); - key=key.replace("@en",""); - String value = entry.getValue(); - - //compare property gotten from the resource with the property from the original query - double nld=Levenshtein.nld(property_to_compare_with.toLowerCase(), key); - - //check if nld is greater than Levensthein - if(nld>=LevenstheinMin||key.contains(lemmatiser.stem(property_to_compare_with))||property_to_compare_with.contains(lemmatiser.stem(key))){ - //if its so, replace old uri with the new one - String querynew=query; - //String replacement = getUriFromIndex(property_to_compare_with.toLowerCase(),1); - String replacement =uri_of_property ; - if(!querynew.contains(replacement)){ - replacement=replacement.replace("ontology", "property"); - } - querynew=querynew.replace(replacement,value); - if(nld>highestNLD){ - bestQuery=querynew; - highestNLD=nld; - } - if(only_best_levensthein_query==true) new_queries.add(bestQuery); - else new_queries.add(querynew); - } - - } - - return new_queries; - } - - private ArrayList<String> advancedCase(ArrayList<String> querylist, String query, String fall) throws SQLException, - JWNLException { - - ArrayList<String> new_queries= new ArrayList<String>(); - String resourceOne=""; - String property_to_compare_withOne=""; - String resourceTwo=""; - String property_to_compare_withTwo=""; - String sideOfPropertyOne="LEFT"; - String sideOfPropertyTwo="LEFT"; - - - int tmpcounter=0; - for(String s : querylist){ - //we dont need the first one, because thats the query itself - - //for condition One - 
tmpcounter=tmpcounter+1; - if(tmpcounter>=1&&tmpcounter<=4){ - if(s.contains("LEFT")){ - sideOfPropertyOne="LEFT"; - resourceOne=s.replace("LEFT",""); - } - if(s.contains("RIGHT")){ - sideOfPropertyOne="RIGHT"; - resourceOne=s.replace("RIGHT",""); - } - if(s.contains("PROPERTY")){ - property_to_compare_withOne=s.replace("PROPERTY",""); - } - - } - - //for condition Two - if(tmpcounter>4){ - if(s.contains("LEFT")){ - sideOfPropertyTwo="LEFT"; - resourceTwo=s.replace("LEFT",""); - } - if(s.contains("RIGHT")){ - sideOfPropertyTwo="RIGHT"; - resourceTwo=s.replace("RIGHT",""); - } - if(s.contains("PROPERTY")){ - property_to_compare_withTwo=s.replace("PROPERTY",""); - } - - } - } - HashMap<String,String> propertiesOne = new HashMap<String, String>(); - HashMap<String,String> propertiesTwo = new HashMap<String, String>(); - Boolean goOnAfterProperty = true; - - //Get Properties for Resource in condition One and Two from Server - try { - - propertiesOne=ServerUtil.getPropertiesForGivenResource(getUriFromIndex(resourceOne.toLowerCase(),0),sideOfPropertyOne); - propertiesTwo=ServerUtil.getPropertiesForGivenResource(getUriFromIndex(resourceTwo.toLowerCase(),0),sideOfPropertyTwo); - - if (propertiesOne==null){ - System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - } - - } catch (IOException e) { - - System.out.println("Begin:\n"+query +"\nError in getting Properties \n End"); - goOnAfterProperty=false; - - } - - - if(goOnAfterProperty==true){ - - - - //Iterate over property from resource one - - if(fall.contains("LEVENSTHEIN"))new_queries=doComplexLevensthein(query, property_to_compare_withOne,property_to_compare_withTwo, getUriFromIndex(property_to_compare_withOne.toLowerCase(),1),propertiesOne,propertiesTwo); - if(fall.contains("WORDNET")) new_queries=doComplexeWordnet(query, property_to_compare_withOne,property_to_compare_withTwo, propertiesOne, propertiesTwo); - - - //add original query for iteration - 
new_queries.add(query); - } - - return new_queries; -} - -private ArrayList<String> doComplexLevensthein(String query, String property_to_compare_withOne, String property_to_compare_withTwo, String uri_of_property_one, HashMap<String,String> propertiesOne,HashMap<String,String> propertiesTwo) throws SQLException{ - ArrayList<String> new_queries= new ArrayList<String>(); - for (Entry<String, String> entryOne : propertiesOne.entrySet()) { - - String queryOne=query; - String keyOne = entryOne.getKey(); - keyOne=keyOne.replace("\"",""); - keyOne=keyOne.replace("@en",""); - String valueOne = entryOne.getValue(); - - - double levnstheinDistanzeOne=Levenshtein.nld(property_to_compare_withOne.toLowerCase(), keyOne); - - /*if distance is higher or equals LevenstheinMin, replace old uri with new uri - * and use that new query, for the property of the second resource - */ - if(levnstheinDistanzeOne>=LevenstheinMin){ - //String replacementOne = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); - String replacementOne =uri_of_property_one; - if(!queryOne.contains(replacementOne)){ - replacementOne=replacementOne.replace("ontology", "property"); - } - queryOne=queryOne.replace(replacementOne,valueOne); - - - /* - * Iterate now over the second set of properties, but this time not using the original query in which - * to replace the old uri with the new one, but using queryOne from the first step. 
- */ - new_queries=doLevensthein(queryOne, property_to_compare_withTwo, getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1), propertiesTwo); - } - } - return new_queries; -} - - - -private ArrayList<String> doWordnet(String query, String property, String uri_of_property,HashMap<String,String> properties) throws SQLException, -JWNLException { - ArrayList<String> new_queries= new ArrayList<String>(); - - System.out.println("Start Iterating Wordnet with "+property+" and deept of "+explorationdepthwordnet); - ArrayList<String> semantics=new ArrayList<String>(); - ArrayList<String> tmp_semantics=new ArrayList<String>(); - ArrayList<String> result_SemanticsMatchProperties=new ArrayList<String>(); - if(property.contains("_")){ - String[] fix = property.split("_"); - //here add also lemmatiser - for(String s: fix) semantics.add(s); - } - else semantics.add(property); - System.out.println("Semantics: "+ semantics); - - for(String s: semantics){ - //first check, if there is a singular form in the wordnet dictionary.. 
eg children -> child - //String _temp_=myindex.getWordnetHelp(property); - String _temp_=myindex.getWordnetHelp(s); - if(_temp_!=null){ - //tmp_semantics=semantics; - tmp_semantics.add(_temp_); - tmp_semantics.add(s); - } - else tmp_semantics.add(s); - /* - else{ - semantics.clear(); - semantics.add(_temp_); - tmp_semantics=semantics; - }*/ - } - - System.out.println("tmp_semantics: "+ tmp_semantics); - Boolean goOnAfterWordnet = true; - - for(int i=0;i<=explorationdepthwordnet;i++){ - - try { - tmp_semantics=getSemantics(tmp_semantics); - System.out.println("tmp_semantics in Iteration: "+ tmp_semantics); - if (tmp_semantics==null){ - goOnAfterWordnet=false; - System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); - - } - else{ - //each word only one time - for(String k : tmp_semantics){ - if(!semantics.contains(k)) semantics.add(k); - } - } - - } catch (IOException e) { - - goOnAfterWordnet=false; - System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semantics+" \n End"); - - } - - } - - if(goOnAfterWordnet==true){ - - for (Entry<String, String> entry : properties.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - key=key.replace("\"",""); - key=key.replace("@en",""); - - for(String b : semantics){ - if(key.contains(b.toLowerCase())||key.contains(lemmatiser.stem(b.toLowerCase()))||b.toLowerCase().contains(lemmatiser.stem(key))){ - if(!result_SemanticsMatchProperties.contains(key)){ - result_SemanticsMatchProperties.add(key); - String query_tmp=query; - //String replacement = getUriFromIndex(property_to_compare_with.toLowerCase(),1); - System.out.println("URI of property: "+uri_of_property); - String replacement = uri_of_property; - if(!query_tmp.contains(replacement)){ - replacement=replacement.replace("ontology", "property"); - } - query_tmp=query_tmp.replace(replacement,value); - System.out.println("Simple Wordnet Query: "+ query_tmp); - 
new_queries.add(query_tmp); - } - } - } - } - - } - - return new_queries; -} - - - private ArrayList<String> doComplexeWordnet(String query, - String property_to_compare_withOne, - String property_to_compare_withTwo, - HashMap<String, String> propertiesOne, - HashMap<String, String> propertiesTwo) throws SQLException, - JWNLException { - - ArrayList<String> new_queries = new ArrayList<String> (); - /* - * #################################### Semantics One############################################# - */ - - - //System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); - ArrayList<String> semanticsOne=new ArrayList<String>(); - ArrayList<String> tmp_semanticsOne=new ArrayList<String>(); - ArrayList<String> result_SemanticsMatchPropertiesOne=new ArrayList<String>(); - semanticsOne.add(property_to_compare_withOne); - - //first check, if there is a singular form in the wordnet dictionary.. eg children -> child - String _temp_One=myindex.getWordnetHelp(property_to_compare_withOne); - if(_temp_One==null){ - tmp_semanticsOne=semanticsOne; - } - else{ - semanticsOne.clear(); - semanticsOne.add(_temp_One); - tmp_semanticsOne=semanticsOne; - } - - //get the "semantics" from wordnet. 
Iterate as long as the explorationdepthwordnet is reached - Boolean goOnAfterWordnet = true; - for(int i=0;i<=explorationdepthwordnet;i++){ - - try { - tmp_semanticsOne=getSemantics(tmp_semanticsOne); - if (tmp_semanticsOne==null){ - goOnAfterWordnet=false; - System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); - - } - else{ - //each word only one time - for(String k : tmp_semanticsOne){ - if(!semanticsOne.contains(k)) semanticsOne.add(k); - } - } - - } catch (IOException e) { - - goOnAfterWordnet=false; - System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsOne+" \n End"); - - } - - - } - /* - * #################################### Semantics Two############################################# - */ - - System.out.println("Start Iterating Wordnet with "+property_to_compare_withOne+" and deept of "+explorationdepthwordnet); - ArrayList<String> semanticsTwo=new ArrayList<String>(); - ArrayList<String> tmp_semanticsTwo=new ArrayList<String>(); - ArrayList<String> result_SemanticsMatchPropertiesTwo=new ArrayList<String>(); - semanticsTwo.add(property_to_compare_withTwo); - - //first check, if there is a singular form in the wordnet dictionary.. eg children -> child - String _temp_Two=myindex.getWordnetHelp(property_to_compare_withTwo); - if(_temp_Two==null){ - tmp_semanticsOne=semanticsTwo; - } - else{ - semanticsTwo.clear(); - semanticsTwo.add(_temp_Two); - tmp_semanticsTwo=semanticsTwo; - } - - //get the "semantics" from wordnet. 
Iterate as long as the explorationdepthwordnet is reached - for(int i=0;i<=explorationdepthwordnet;i++){ - - try { - tmp_semanticsTwo=getSemantics(tmp_semanticsTwo); - if (tmp_semanticsTwo==null){ - goOnAfterWordnet=false; - System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); - - } - else{ - //each word only one time - for(String k : tmp_semanticsTwo){ - if(!semanticsTwo.contains(k)) semanticsTwo.add(k); - } - } - - } catch (IOException e) { - - goOnAfterWordnet=false; - System.out.println("Begin:\n"+query +"\n Error in searching Wordnet with word "+semanticsTwo+" \n End"); - - } - - - } - - - if(goOnAfterWordnet==true){ - - - //start iterating over the propery sets - for (Entry<String, String> entryOne : propertiesOne.entrySet()) { - String keyOne = entryOne.getKey(); - String valueOne = entryOne.getValue(); - String queryOne=query; - keyOne=keyOne.replace("\"",""); - keyOne=keyOne.replace("@en",""); - - for(String b : semanticsOne){ - if(keyOne.contains(b.toLowerCase())){ - if(!result_SemanticsMatchPropertiesOne.contains(keyOne)){ - //create new query - result_SemanticsMatchPropertiesOne.add(keyOne); - String replacementOne = getUriFromIndex(property_to_compare_withOne.toLowerCase(),1); - if(!queryOne.contains(replacementOne)){ - replacementOne=replacementOne.replace("ontology", "property"); - } - queryOne=queryOne.replace(replacementOne,valueOne); - - for (Entry<String, String> entryTwo : propertiesTwo.entrySet()) { - String keyTwo = entryTwo.getKey(); - String valueTwo = entryTwo.getValue(); - keyTwo=keyTwo.replace("\"",""); - keyTwo=keyTwo.replace("@en",""); - - for(String z : semanticsTwo){ - if(keyTwo.contains(z.toLowerCase())){ - if(!result_SemanticsMatchPropertiesTwo.contains(keyTwo)){ - //create new query - result_SemanticsMatchPropertiesTwo.add(keyTwo); - String queryTwo=queryOne; - String replacementTwo = getUriFromIndex(property_to_compare_withTwo.toLowerCase(),1); - 
if(!queryTwo.contains(replacementTwo)){ - replacementTwo=replacementTwo.replace("ontology", "property"); - } - queryTwo=queryTwo.replace(replacementTwo,valueTwo); - System.out.println("Complexe Wordnet Query: "+ queryTwo); - new_queries.add(queryTwo); - } - } - } - } - - } - } - } - - - - } - - - //add original query for iteration - new_queries.add(query); - - } - - return new_queries; - } - - - - - - - - - - - - //TODO: Write function new!!!!! - /** - * Iterates thrue the conditions and returns an array, where one can see, if the Property is left or right from the resource - * @param query - * @return returns an array, where one can see, if the Property is left or right from the resource - */ - private static ArrayList<String> createLeftAndRightPropertyArray(String query){ - query=query.replace(" ", " "); - Pattern p = Pattern.compile (".*\\{(.*\\<http.*)\\}.*"); - Matcher m = p.matcher (query); - ArrayList<String> lstquery = new ArrayList<String>(); - while (m.find()) { - System.out.println("In While Loop!"); - String workingQuery= m.group(1); - //if there is an .../ontology/C, dann ist das eine Klasse und das ganze soll dann nicht ersetzt reingepackt werden, sondern so bleiben, wie es ist. 
- System.out.println("Before new pattern and checking "+workingQuery); - - //take the Filter out, so you only have the conditions left - Pattern p2=Pattern.compile (".*(\\.FILTER\\(.*\\)).*"); - Matcher m2 = p2.matcher (workingQuery); - while(m2.find()){ - System.out.println("FIlter: "+m2.group(1)); - workingQuery=workingQuery.replace(m2.group(1), ""); - System.out.println("Without Filter: "+workingQuery); - } - - String resourceTemp=""; - - if(workingQuery.contains("ontologie")){ - Pattern p1=Pattern.compile (".*\\<(http://dbpedia.org/ontology/[A-Z].*)\\>\\W\\W*.*"); - Matcher m1 = p1.matcher (workingQuery); - - - /* - * Das darf nicht sein: - * Replacment: <http://dbpedia.org/ontology/Caves> rdf:type ?x .?y <http://dbpedia.org/property/entrances> - */ - while(m1.find()){ - resourceTemp="RESOURCE"+m1.group(1); - String replacment="<"+m1.group(1)+">"; - //TODO: Make it nice!!! - //if he doesnt find the ontolokg party, kind of skip - if(!replacment.contains("property")&&!replacment.contains("resource")){ - System.out.println("Replacment: "+replacment); - workingQuery=workingQuery.replace(replacment, "SKIP"); - System.out.println("New temp: "+workingQuery); - } - - } - - } - - /* - * dbpedia.org/class/yago/ - */ - if(workingQuery.contains("yago")){ - Pattern p3=Pattern.compile (".*\\<(http://dbpedia.org/class/yago//[A-Z].*)\\>\\W.*"); - Matcher m3 = p3.matcher (workingQuery); - - - /* - * Das darf nicht sein: - * Replacment: <http://dbpedia.org/ontology/Caves> rdf:type ?x .?y <http://dbpedia.org/property/entrances> - */ - while(m3.find()){ - resourceTemp="RESOURCE"+m3.group(1); - String replacment="<"+m3.group(1)+">"; - //TODO: Make it nice!!! 
- //if he doesnt find the ontolokg party, kind of skip - if(!replacment.contains("property")&&!replacment.contains("resource")){ - System.out.println("Replacment: "+replacment); - workingQuery=workingQuery.replace(replacment, "SKIP"); - System.out.println("New temp: "+workingQuery); - } - - } - - } - - System.out.println("TMP before replace :"+workingQuery); - workingQuery=workingQuery.replace("http://dbpedia.org/resource/","").replace("http://dbpedia.org/property/", "").replace("http://dbpedia.org/ontology/", ""); - - System.out.println("TMP After replace :"+workingQuery); - //split on . for sign for end of conditions - String[] firstArray=workingQuery.split("\\."); - for(String i : firstArray){ - - String[] secondArray=i.split(" "); - - //always in three counts - int counter=0; - for(String j : secondArray){ - //System.out.println("j "+j); - counter=counter+1; - //only one condition - if(secondArray.length%3==0){ - if(counter==1&&j.contains("<")&&!j.contains("SKIP")&&!j.contains("rdf:type")){ - //position of Property is right - lstquery.add("RIGHT"+j.replace("<", "").replace(">","")); - } - else if(counter==3&&j.contains("<")&&!j.contains("SKIP")&&!j.contains("rdf:type")){ - //position of Property is left - //here was RIGHT before.... - lstquery.add("LEFT"+j.replace("<", "").replace(">","")); - } - else if(counter==2&&!j.contains("SKIP")&&!j.contains("rdf:type")){ - lstquery.add("PROPERTY"+j.replace("<", "").replace(">","")); - } - - else if(j.contains("?")) lstquery.add("VARIABLE"); - else if(j.contains("SKIP"))lstquery.add(resourceTemp); - else if(j.contains("rdf:type"))lstquery.add("IsA"); - } - if(counter==3)counter=0; - - - } - } - //} - } - - //System.out.println("lstquery "+lstquery); - return lstquery; - } - - - - //TODO: Plural Singual abfragen über die Wordnetdatei... - - /** - * Method gets a String and takes the information from the templator to creat a Sparql query. 
- * @param question question in natural language - * @return ArrayList of Sparql queries. - * @throws SQLException - */ - private queryInformation getQuery(queryInformation queryObject) throws SQLException { - ArrayList<ArrayList<String>> lstquery = new ArrayList<ArrayList<String>>(); - String question=queryObject.getQuery(); - Set<BasicQueryTemplate> querytemps = btemplator.buildBasicQueries(question); - for (BasicQueryTemplate temp : querytemps) { - - ArrayList<String> lstquerynew = new ArrayList<String>(); - ArrayList<String> lstquerupsidedown = new ArrayList<String>(); - String query; - String selTerms =""; - String yago_query=""; - String yago_query_upside_down=""; - - boolean addQuery=true; - //sometimes there isnt an Selectterm, so dont use this query - try{ - for(SPARQL_Term terms :temp.getSelTerms()) selTerms=selTerms+(terms.toString())+" "; - } - catch (Exception e){ - selTerms=""; - addQuery=false; - } - - - String conditions = ""; - try{ - for(Path condition: temp.getConditions()) conditions=conditions+(condition.toString())+"."; - } - catch (Exception e){ - conditions=""; - addQuery=false; - } - - String filters=""; - try{ - for(SPARQL_Filter tmp : temp.getFilters()) filters=filters+tmp+" "; - } - catch(Exception e){ - filters=""; - addQuery=false; - } - String having=""; - try{ - for(SPARQL_Having tmp : temp.getHavings()) having=having+tmp+" "; - } - catch(Exception e){ - having=""; - addQuery=false; - } - - //if there is no order by, replace with "" - String orderdBy="ORDER BY "; - try{ - for(SPARQL_Term tmp : temp.getOrderBy()) { - System.out.println("Yeah"); - orderdBy=orderdBy+tmp+" "; - } - if((temp.getOrderBy()).size()==0)orderdBy=""; - } - catch(Exception e){ - orderdBy=""; - addQuery=false; - } - - //if limit == 0, then set limit as "" - String limit=""; - try{ - limit="LIMIT "+temp.getLimit(); - - if(temp.getLimit()==0)limit=""; - } - catch(Exception e){ - limit=""; - addQuery=false; - } - - if(addQuery==true){ - query="PREFIX rdf: 
<http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions.replace("--","") + filters+"}"+orderdBy+" "+having +" "+limit; - - String conditions_new = ""; - for(Path condition: temp.getConditions()){ - //make conditions up-side-down - String[] tmp_upside = condition.toString().split(" -- "); - String tmp_conditions_new=""; - for(String con : tmp_upside) tmp_conditions_new = con +" "+tmp_conditions_new; - //remove all dots befor end - tmp_conditions_new=tmp_conditions_new.replace(".", ""); - //at the end ein . - tmp_conditions_new = tmp_conditions_new + "."; - - //conditions_new=tmp_conditions_new; - - conditions_new=conditions_new + tmp_conditions_new; - } - - - - System.out.println("Conditions: " + conditions); - System.out.println("Conditions_new: " + conditions_new); - - - String query_upside_down = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+temp.getQt().toString()+" "+selTerms+" WHERE {"+ conditions_new.replace("--","") +filters+"}" + orderdBy +" "+having+" "+limit; - String[] slots= null; - - - /* - * replace isA with rdf:type - */ - query_upside_down=query_upside_down.replace("isA", "rdf:type"); - query=query.replace("isA", "rdf:type"); - - int slotcounter=1; - - /* - * the one after the isA, has to be an ontology Class or an Yago Class!!! - * so first find out, which one is behind the isA - * best with regex or so.... take the condition, regex the thing between isA and . for End of Condition - * kind of regex=[*a IsA (\\?*a.)*a] - * Then put the isA thing in the variable isaComponent and mark it as later on as Resource! 
- */ - String isaComponent=""; - - Pattern p = Pattern.compile (".*isA (\\?.*)\\..*"); - - /* - * use conditions, because only in this case, there is the right resource right of the isA - */ - Matcher m = p.matcher (conditions.replace("--", "").replace(" ", " ")); - String result=""; - - System.out.println("################"); - while (m.find()) { - if(m.group(1)!=null) - //System.out.println(m.group(1)); - isaComponent=m.group(1); - } - System.out.println("isaComponent "+isaComponent); - - /* - * just in case, there is still a . in it... - * funzt - * - */ - if(isaComponent.contains(".")){ - String[] tmp_array=isaComponent.split("\\."); - for(String i: tmp_array) System.out.println("tmp_array "+i); - isaComponent=tmp_array[0]; - - System.out.println("new isaComponent "+isaComponent); - } - - if(isaComponent=="") isaComponent="No isa Component"; - System.out.println("isaComponent "+isaComponent); - System.out.println("################"); - - for(Slot slot : temp.getSlots()){ - - System.out.println("Slot: "+slot); - //see below - slotcounter=slotcounter+1; - - //resource will be detectet. - //If its not a resource, it has to be a property! - String resource=""; - String property=""; - String slotstring=""; - if(slot.toString().contains("RESOURCE")){ - resource=slot.toString().replace("{","").replace("}","").replace(" RESOURCE ", ""); - System.out.println("Found Resource in getQuery: "+ resource); - } - else{ - property=slot.toString().replace("UNSPEC","").replace("RESOURCE","").replace("{","").replace("}","").replace(" PROPERTY ",""); - System.out.println("Found Property in getQuery: "+ property); - } - - - //query=query.replace(replace, "<"+hm_result+">"); - - /*System.out.println("Recource "+resource); - System.out.println("Property "+property);*/ - - boolean skip=false; - - /* - * Der geht hier garnicht in die Schleife. 
- */ - if(resource.contains(isaComponent.replace("?", "")) || property.contains(isaComponent.replace("?", ""))){ - skip=true; - - /* - * now replace the variable with the value of OntologyClass - */ - String replace=""; - String tmp=""; - - if(resource.contains(isaComponent.replace("?", ""))) tmp=resource; - if(property.contains(isaComponent.replace("?", ""))) tmp=property; - - String[] array = tmp.split(":"); - if(array[0].length()<2)replace = "?"+array[0]+" "; - else replace="?"+array[0]; - try{ - array[1]=array[1].replace(" ", ""); - } - catch(Exception e){ - - } - - /* - * Here lookup in ontology and in Yago, if ontology doesnt exist, use Yago, if not, use Ontology - * if both exist, add yago_query... [truncated message content] |
From: <lor...@us...> - 2012-05-04 01:01:01
|
Revision: 3692 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3692&view=rev Author: lorenz_b Date: 2012-05-04 01:00:54 +0000 (Fri, 04 May 2012) Log Message: ----------- Reduced delay time. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 21:29:31 UTC (rev 3691) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-04 01:00:54 UTC (rev 3692) @@ -187,7 +187,7 @@ private final int nrOfAttemptsBeforeForceToSPARQL1_0_Mode = 2; //delay between 2 attempts - private final int delayInMilliseconds = 15000; + private final int delayInMilliseconds = 5000; // max. execution time for each learner for each entity private int maxExecutionTimeInSeconds = 10; @@ -403,7 +403,7 @@ try { ((AbstractAxiomLearningAlgorithm)learner).setForceSPARQL_1_0_Mode(attempt > nrOfAttemptsBeforeForceToSPARQL1_0_Mode); learner.start(); - timeout = false; + timeout = ((AbstractAxiomLearningAlgorithm)learner).isTimeout(); } catch (Exception e) { if(e.getCause() instanceof SocketTimeoutException){ @@ -491,7 +491,7 @@ try { ((AbstractAxiomLearningAlgorithm)learner).setForceSPARQL_1_0_Mode(attempt > nrOfAttemptsBeforeForceToSPARQL1_0_Mode); learner.start(); - timeout = false; + timeout = ((AbstractAxiomLearningAlgorithm)learner).isTimeout(); } catch (Exception e) { if(e.getCause() instanceof SocketTimeoutException){ @@ -589,7 +589,7 @@ try { ((AbstractAxiomLearningAlgorithm)learner).setForceSPARQL_1_0_Mode(attempt > nrOfAttemptsBeforeForceToSPARQL1_0_Mode); learner.start(); - timeout = false; + timeout = ((AbstractAxiomLearningAlgorithm)learner).isTimeout(); } catch (Exception e) { e.printStackTrace(); } This was sent by the 
SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 21:29:38
|
Revision: 3691 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3691&view=rev Author: lorenz_b Date: 2012-05-03 21:29:31 +0000 (Thu, 03 May 2012) Log Message: ----------- Additional termination criteria and option to force sparql 1.0 mode. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-03 21:20:31 UTC (rev 3690) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-03 21:29:31 UTC (rev 3691) @@ -156,7 +156,7 @@ } } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2012-05-03 21:20:31 UTC (rev 3690) +++ 
trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2012-05-03 21:29:31 UTC (rev 3691) @@ -93,7 +93,7 @@ allDataProperties = new SPARQLTasks(ks.getEndpoint()).getAllDataProperties(); allDataProperties.remove(propertyToDescribe); - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-05-03 21:20:31 UTC (rev 3690) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-05-03 21:29:31 UTC (rev 3691) @@ -94,7 +94,7 @@ allObjectProperties = new SPARQLTasks(ks.getEndpoint()).getAllObjectProperties(); allObjectProperties.remove(propertyToDescribe); - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2012-05-03 21:20:31 UTC (rev 3690) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2012-05-03 21:29:31 UTC (rev 3691) @@ -74,7 +74,7 @@ SortedSet<DatatypeProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } 
else { runSPARQL1_0_Mode(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2012-05-03 21:20:31 UTC (rev 3690) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2012-05-03 21:29:31 UTC (rev 3691) @@ -78,7 +78,7 @@ SortedSet<ObjectProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java 2012-05-03 21:20:31 UTC (rev 3690) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java 2012-05-03 21:29:31 UTC (rev 3691) @@ -73,7 +73,7 @@ SortedSet<DatatypeProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java 2012-05-03 21:20:31 UTC (rev 3690) +++ 
trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java 2012-05-03 21:29:31 UTC (rev 3691) @@ -73,7 +73,7 @@ SortedSet<ObjectProperty> existingSuperProperties = reasoner.getSuperProperties(propertyToDescribe); logger.debug("Existing super properties: " + existingSuperProperties); - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 21:20:37
|
Revision: 3690 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3690&view=rev Author: lorenz_b Date: 2012-05-03 21:20:31 +0000 (Thu, 03 May 2012) Log Message: ----------- Fallback to SPARQL 1.0 after 2 attempts. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 21:20:22 UTC (rev 3689) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 21:20:31 UTC (rev 3690) @@ -183,6 +183,8 @@ // max. number of attempts per algorithm and entity, because to many queries // in a short time could cause blocking by the endpoint private final int maxAttempts = 5; + //after 2 attempts we force the iterative SPARQL 1.1 mode + private final int nrOfAttemptsBeforeForceToSPARQL1_0_Mode = 2; //delay between 2 attempts private final int delayInMilliseconds = 15000; @@ -399,6 +401,7 @@ logger.info("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); startTime = System.currentTimeMillis(); try { + ((AbstractAxiomLearningAlgorithm)learner).setForceSPARQL_1_0_Mode(attempt > nrOfAttemptsBeforeForceToSPARQL1_0_Mode); learner.start(); timeout = false; } catch (Exception e) { @@ -486,6 +489,7 @@ logger.info("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); startTime = System.currentTimeMillis(); try { + ((AbstractAxiomLearningAlgorithm)learner).setForceSPARQL_1_0_Mode(attempt > nrOfAttemptsBeforeForceToSPARQL1_0_Mode); learner.start(); timeout = false; } catch (Exception e) { @@ -583,6 +587,7 @@ logger.info("Applying " + algName + " on " + cls + " ... 
(Attempt " + attempt + ")"); startTime = System.currentTimeMillis(); try { + ((AbstractAxiomLearningAlgorithm)learner).setForceSPARQL_1_0_Mode(attempt > nrOfAttemptsBeforeForceToSPARQL1_0_Mode); learner.start(); timeout = false; } catch (Exception e) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 21:20:30
|
Revision: 3689 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3689&view=rev Author: lorenz_b Date: 2012-05-03 21:20:22 +0000 (Thu, 03 May 2012) Log Message: ----------- Fallback to SPARQL 1.0 after 2 attempts. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -99,7 +99,7 @@ // get number of instances of s with <s p o> query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int total = 0; while(rs.hasNext()){ @@ -108,7 +108,7 @@ } query = 
"SELECT (COUNT(*) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); + rs = executeSelectQuery(query, model); int symmetric = 0; while(rs.hasNext()){ qs = rs.next(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -97,7 +97,7 @@ query = String.format( "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", propertyToDescribe.getName()); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int all = 1; while (rs.hasNext()) { @@ -107,7 +107,7 @@ // get number of instances of s with <s p o> <s p o1> where o != o1 query = "SELECT (COUNT(DISTINCT ?s) AS ?functional) WHERE {?s <%s> ?o1. FILTER NOT EXISTS {?s <%s> ?o2. 
FILTER(?o1 != ?o1)} }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); + rs = executeSelectQuery(query, model); int functional = 1; while (rs.hasNext()) { qs = rs.next(); @@ -129,29 +129,25 @@ private void runSPARQL1_1_Mode() { // get number of instances of s with <s p o> - String query = String.format( - "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", - propertyToDescribe.getName()); - ResultSet rs = executeSelectQuery(query); - QuerySolution qs; - int all = 1; - while (rs.hasNext()) { - qs = rs.next(); - all = qs.getLiteral("all").getInt(); + int numberOfSubjects = reasoner.getSubjectCountForProperty(propertyToDescribe, getRemainingRuntimeInMilliSeconds()); + if(numberOfSubjects == -1){ + logger.warn("Early termination: Got timeout while counting number of distinct subjects for given property."); + return; } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?s) AS ?functional) WHERE {?s <%s> ?o1. FILTER NOT EXISTS {?s <%s> ?o2. FILTER(?o1 != ?o1)} }"; + String query = "SELECT (COUNT(DISTINCT ?s) AS ?functional) WHERE {?s <%s> ?o1. FILTER NOT EXISTS {?s <%s> ?o2. 
FILTER(?o1 != ?o1)} }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query); int functional = 1; + QuerySolution qs; while (rs.hasNext()) { qs = rs.next(); functional = qs.getLiteral("functional").getInt(); } - if (all > 0) { + if (numberOfSubjects > 0) { currentlyBestAxioms.add(new EvaluatedAxiom( new FunctionalDatatypePropertyAxiom(propertyToDescribe), - computeScore(all, functional), + computeScore(numberOfSubjects, functional), declaredAsFunctional)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -102,7 +102,7 @@ query = String.format( "SELECT (COUNT(DISTINCT ?o) AS ?all) WHERE {?s <%s> ?o.}", propertyToDescribe.getName()); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int all = 1; while (rs.hasNext()) { @@ -110,13 +110,13 @@ all = qs.getLiteral("all").getInt(); } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?o) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. FILTER(?s1 != ?s2) }"; + query = "SELECT (COUNT(DISTINCT ?o) AS ?inversefunctional) WHERE {?s1 <%s> ?o. FILTER NOT EXISTS {?s2 <%s> ?o. 
FILTER(?s1 != ?s2)}}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); - int notInverseFunctional = 1; + rs = executeSelectQuery(query, model); + int inverseFunctional = 1; while (rs.hasNext()) { qs = rs.next(); - notInverseFunctional = qs.getLiteral("noninversefunctional") + inverseFunctional = qs.getLiteral("inversefunctional") .getInt(); } if (all > 0) { @@ -124,8 +124,7 @@ currentlyBestAxioms .add(new EvaluatedAxiom( new InverseFunctionalObjectPropertyAxiom( - propertyToDescribe), computeScore(all, all - - notInverseFunctional), + propertyToDescribe), computeScore(all, inverseFunctional), declaredAsInverseFunctional)); } @@ -137,32 +136,27 @@ private void runSPARQL1_1_Mode() { // get number of instances of s with <s p o> - String query = String.format( - "SELECT (COUNT(DISTINCT ?o) AS ?all) WHERE {?s <%s> ?o.}", - propertyToDescribe.getName()); - ResultSet rs = executeSelectQuery(query); - QuerySolution qs; - int all = 1; - while (rs.hasNext()) { - qs = rs.next(); - all = qs.getLiteral("all").getInt(); + int numberOfObjects = reasoner.getObjectCountForProperty(propertyToDescribe, getRemainingRuntimeInMilliSeconds()); + if(numberOfObjects == -1){ + logger.warn("Early termination: Got timeout while counting number of distinct objects for given property."); + return; } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?o) AS ?noninversefunctional) WHERE {?s1 <%s> ?o. ?s2 <%s> ?o. FILTER(?s1 != ?s2) }"; + String query = "SELECT (COUNT(DISTINCT ?o) AS ?inversefunctional) WHERE {?s1 <%s> ?o. FILTER NOT EXISTS {?s2 <%s> ?o. 
FILTER(?s1 != ?s2)}}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); - int notInverseFunctional = 1; + ResultSet rs = executeSelectQuery(query); + int inverseFunctional = 1; + QuerySolution qs; while (rs.hasNext()) { qs = rs.next(); - notInverseFunctional = qs.getLiteral("noninversefunctional") + inverseFunctional = qs.getLiteral("inversefunctional") .getInt(); } - if (all > 0) { + if (numberOfObjects > 0) { currentlyBestAxioms .add(new EvaluatedAxiom( new InverseFunctionalObjectPropertyAxiom( - propertyToDescribe), computeScore(all, all - - notInverseFunctional), + propertyToDescribe), computeScore(numberOfObjects, inverseFunctional), declaredAsInverseFunctional)); } } @@ -173,6 +167,7 @@ l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/profession")); l.setMaxExecutionTimeInSeconds(10); l.init(); + l.setForceSPARQL_1_0_Mode(true); l.start(); System.out.println(l.getCurrentlyBestEvaluatedAxioms(1)); } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -101,7 +101,7 @@ query = String.format( "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", propertyToDescribe); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int all = 0; while (rs.hasNext()) { @@ -113,7 +113,7 @@ // get number of instances s where not exists <s p s> query = "SELECT (COUNT(DISTINCT ?s) AS ?irreflexive) WHERE {?s <%s> ?o. 
FILTER(?s != ?o)}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); + rs = executeSelectQuery(query, model); int irreflexive = 0; while (rs.hasNext()) { qs = rs.next(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -99,7 +99,7 @@ // get fraction of instances s with <s p s> query = "SELECT (COUNT(DISTINCT ?s) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int total = 0; while (rs.hasNext()) { @@ -108,7 +108,7 @@ } query = "SELECT (COUNT(DISTINCT ?s) AS ?reflexive) WHERE {?s <%s> ?s.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); + rs = executeSelectQuery(query, model); int reflexive = 0; while (rs.hasNext()) { qs = rs.next(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -98,7 +98,7 @@ // get number of instances of s with <s p o> query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = 
query.replace("%s", propertyToDescribe.getURI().toString()); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int total = 0; while(rs.hasNext()){ @@ -107,7 +107,7 @@ } query = "SELECT (COUNT(*) AS ?symmetric) WHERE {?s <%s> ?o. ?o <%s> ?s}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); + rs = executeSelectQuery(query, model); int symmetric = 0; while(rs.hasNext()){ qs = rs.next(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -100,7 +100,7 @@ // get number of instances of s with <s p o> query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o. ?o <%s> ?o1.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int total = 0; while(rs.hasNext()){ @@ -109,7 +109,7 @@ } query = "SELECT (COUNT(*) AS ?transitive) WHERE {?s <%s> ?o. ?o <%s> ?o1. 
?s <%s> ?o1.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); + rs = executeSelectQuery(query, model); int transitive = 0; while(rs.hasNext()){ qs = rs.next(); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-03 20:59:33 UTC (rev 3688) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-03 21:20:22 UTC (rev 3689) @@ -53,6 +53,7 @@ import org.dllearner.core.owl.Nothing; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.core.owl.ObjectPropertyHierarchy; +import org.dllearner.core.owl.Property; import org.dllearner.core.owl.Thing; import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; @@ -169,7 +170,7 @@ } } - public int getSubjectCountForProperty(ObjectProperty p, long timeout){ + public int getSubjectCountForProperty(Property p, long timeout){ int cnt = -1; String query = String.format( "SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s <%s> ?o.}", @@ -182,6 +183,19 @@ return cnt; } + public int getObjectCountForProperty(ObjectProperty p, long timeout){ + int cnt = -1; + String query = String.format( + "SELECT (COUNT(DISTINCT ?o) AS ?cnt) WHERE {?s <%s> ?o.}", + p.getName()); + ResultSet rs = executeSelectQuery(query, timeout); + if(rs.hasNext()){ + cnt = rs.next().getLiteral("cnt").getInt(); + } + + return cnt; + } + public int getPopularity(NamedClass nc){ if(classPopularityMap.containsKey(nc)){ return classPopularityMap.get(nc); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 20:59:42
|
Revision: 3688 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3688&view=rev Author: lorenz_b Date: 2012-05-03 20:59:33 +0000 (Thu, 03 May 2012) Log Message: ----------- Additional termination criteria and option to force sparql 1.0 mode. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ 
trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -174,7 +174,7 @@ Model newModel = executeConstructQuery(query); Map<NamedClass, Integer> result = new HashMap<NamedClass, Integer>(); NamedClass cls; - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); //get total number of distinct instances query = "SELECT (COUNT(DISTINCT ?s) AS ?count) WHERE {?s a ?type.}"; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/AsymmetricObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -78,7 +78,7 @@ logger.info("Property is already declared as symmetric in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -94,7 +94,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -251,7 +251,7 @@ SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); // endpoint = new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())); DisjointObjectPropertyAxiomLearner l = new DisjointObjectPropertyAxiomLearner(new SparqlEndpointKS(endpoint));//.getEndpointDBpediaLiveAKSW())); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/birthPlace")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/aircraftTransport")); l.setMaxExecutionTimeInSeconds(10); l.init(); l.getReasoner().precomputeObjectPropertyPopularity(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -75,7 +75,7 @@ logger.info("Property is already declared as functional in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -91,7 +91,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ 
+ while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> query = String.format( @@ -105,19 +105,19 @@ all = qs.getLiteral("all").getInt(); } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> ?o. ?s <%s> ?o1. FILTER(?o != ?o1) }"; + query = "SELECT (COUNT(DISTINCT ?s) AS ?functional) WHERE {?s <%s> ?o1. FILTER NOT EXISTS {?s <%s> ?o2. FILTER(?o1 != ?o1)} }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); - int notFunctional = 1; + int functional = 1; while (rs.hasNext()) { qs = rs.next(); - notFunctional = qs.getLiteral("notfunctional").getInt(); + functional = qs.getLiteral("functional").getInt(); } if (all > 0) { currentlyBestAxioms.clear(); currentlyBestAxioms.add(new EvaluatedAxiom( new FunctionalDatatypePropertyAxiom(propertyToDescribe), - computeScore(all, all - notFunctional), + computeScore(all, functional), declaredAsFunctional)); } @@ -140,18 +140,18 @@ all = qs.getLiteral("all").getInt(); } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> ?o. ?s <%s> ?o1. FILTER(?o != ?o1) }"; + query = "SELECT (COUNT(DISTINCT ?s) AS ?functional) WHERE {?s <%s> ?o1. FILTER NOT EXISTS {?s <%s> ?o2. 
FILTER(?o1 != ?o1)} }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query); - int notFunctional = 1; + int functional = 1; while (rs.hasNext()) { qs = rs.next(); - notFunctional = qs.getLiteral("notfunctional").getInt(); + functional = qs.getLiteral("functional").getInt(); } if (all > 0) { currentlyBestAxioms.add(new EvaluatedAxiom( new FunctionalDatatypePropertyAxiom(propertyToDescribe), - computeScore(all, all - notFunctional), + computeScore(all, functional), declaredAsFunctional)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -76,7 +76,7 @@ logger.info("Property is already declared as functional in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -92,7 +92,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){System.out.println(query); model.add(newModel); // get number of instances of s with <s p o> query = String.format( @@ -105,20 +105,22 @@ qs = rs.next(); all = qs.getLiteral("all").getInt(); } + System.out.println(all); // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> 
?o. ?s <%s> ?o1. FILTER(?o != ?o1) }"; + query = "SELECT (COUNT(DISTINCT ?s) AS ?functional) WHERE {?s <%s> ?o1. FILTER NOT EXISTS {?s <%s> ?o2. FILTER(?o1 != ?o2)} }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); rs = executeSelectQuery(query, model); - int notFunctional = 1; + int functional = 1; while (rs.hasNext()) { qs = rs.next(); - notFunctional = qs.getLiteral("notfunctional").getInt(); + functional = qs.getLiteral("functional").getInt(); } + System.out.println(functional); if (all > 0) { currentlyBestAxioms.clear(); currentlyBestAxioms.add(new EvaluatedAxiom( new FunctionalObjectPropertyAxiom(propertyToDescribe), - computeScore(all, all - notFunctional), + computeScore(all, functional), declaredAsFunctional)); } offset += limit; @@ -129,37 +131,34 @@ private void runSPARQL1_1_Mode() { // get number of instances of s with <s p o> - String query = String.format( - "SELECT (COUNT(DISTINCT ?s) AS ?all) WHERE {?s <%s> ?o.}", - propertyToDescribe.getName()); - ResultSet rs = executeSelectQuery(query); - QuerySolution qs; - int all = 1; - while (rs.hasNext()) { - qs = rs.next(); - all = qs.getLiteral("all").getInt(); + int numberOfSubjects = reasoner.getSubjectCountForProperty(propertyToDescribe, getRemainingRuntimeInMilliSeconds()); + if(numberOfSubjects == -1){ + logger.warn("Early termination: Got timeout while counting number of distinct subjects for given property."); + return; } // get number of instances of s with <s p o> <s p o1> where o != o1 - query = "SELECT (COUNT(DISTINCT ?s) AS ?notfunctional) WHERE {?s <%s> ?o. ?s <%s> ?o1. FILTER(?o != ?o1) }"; + String query = "SELECT (COUNT(DISTINCT ?s) AS ?functional) WHERE {?s <%s> ?o1. FILTER NOT EXISTS {?s <%s> ?o2. 
FILTER(?o1 != ?o2)} }"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - rs = executeSelectQuery(query); - int notFunctional = 1; + ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + int functional = 1; while (rs.hasNext()) { qs = rs.next(); - notFunctional = qs.getLiteral("notfunctional").getInt(); + functional = qs.getLiteral("functional").getInt(); } - if (all > 0) { + if (numberOfSubjects > 0) { currentlyBestAxioms.add(new EvaluatedAxiom( new FunctionalObjectPropertyAxiom(propertyToDescribe), - computeScore(all, all - notFunctional), + computeScore(numberOfSubjects, functional), declaredAsFunctional)); } } public static void main(String[] args) throws Exception{ - FunctionalObjectPropertyAxiomLearner l = new FunctionalObjectPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia())); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/league")); + FunctionalObjectPropertyAxiomLearner l = new FunctionalObjectPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/wikiPageExternalLink")); l.setMaxExecutionTimeInSeconds(10); +// l.setForceSPARQL_1_0_Mode(true); l.init(); l.start(); System.out.println(l.getCurrentlyBestEvaluatedAxioms(5)); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -78,7 +78,7 @@ logger.info("Property is already declared as functional in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + 
if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -96,7 +96,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> query = String.format( Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -77,7 +77,7 @@ } } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -90,15 +90,15 @@ Model model = ModelFactory.createDefaultModel(); int limit = 1000; int offset = 0; - String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; + String baseQuery = "CONSTRUCT {?s <%s> ?o. ?o ?p ?s} WHERE {?s <%s> ?o. OPTIONAL{?o ?p ?s. 
?p a owl:ObjectProperty}} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; query = query.replace("%s", propertyToDescribe.getURI().toString()); - ResultSet rs = executeSelectQuery(query); + ResultSet rs = executeSelectQuery(query, model); QuerySolution qs; int total = 0; while(rs.hasNext()){ @@ -107,7 +107,7 @@ } query = String.format("SELECT ?p (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o. ?o ?p ?s.} GROUP BY ?p", propertyToDescribe.getName()); - rs = executeSelectQuery(query); + rs = executeSelectQuery(query, model); while(rs.hasNext()){ qs = rs.next(); currentlyBestAxioms.add(new EvaluatedAxiom( @@ -131,7 +131,7 @@ total = qs.getLiteral("total").getInt(); } - query = String.format("SELECT ?p (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o. ?o ?p ?s.} GROUP BY ?p", propertyToDescribe.getName()); + query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?p (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o. ?o ?p ?s. 
?p a owl:ObjectProperty} GROUP BY ?p", propertyToDescribe.getName()); rs = executeSelectQuery(query); while(rs.hasNext()){ qs = rs.next(); @@ -143,13 +143,12 @@ } public static void main(String[] args) throws Exception{ - SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint( - new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList()));//.getEndpointDBpediaLiveAKSW())); + SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia()); - InverseObjectPropertyAxiomLearner l = new InverseObjectPropertyAxiomLearner(ks); l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/officialLanguage")); - l.setMaxExecutionTimeInSeconds(100); + l.setMaxExecutionTimeInSeconds(10); + l.setForceSPARQL_1_0_Mode(true); // l.setReturnOnlyNewAxioms(true); l.init(); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -77,7 +77,7 @@ logger.info("Property is already declared as irreflexive in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -95,7 +95,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ 
model.add(newModel); // get all instance s with <s p o> query = String.format( Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ReflexiveObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -76,7 +76,7 @@ logger.info("Property is already declared as reflexive in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -94,7 +94,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); // get fraction of instances s with <s p s> query = "SELECT (COUNT(DISTINCT ?s) AS ?total) WHERE {?s <%s> ?o.}"; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SymmetricObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -77,7 +77,7 @@ logger.info("Property is already declared as symmetric in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ 
runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -93,7 +93,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o.}"; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/TransitiveObjectPropertyAxiomLearner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -78,7 +78,7 @@ logger.info("Property is already declared as transitive in knowledge base."); } - if(ks.supportsSPARQL_1_1()){ + if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){ runSPARQL1_1_Mode(); } else { runSPARQL1_0_Mode(); @@ -95,7 +95,7 @@ String baseQuery = "CONSTRUCT {?s <%s> ?o.} WHERE {?s <%s> ?o} LIMIT %d OFFSET %d"; String query = String.format(baseQuery, propertyToDescribe.getName(), propertyToDescribe.getName(), limit, offset); Model newModel = executeConstructQuery(query); - while(newModel.size() != 0){ + while(!terminationCriteriaSatisfied() && newModel.size() != 0){ model.add(newModel); // get number of instances of s with <s p o> query = "SELECT (COUNT(*) AS ?total) WHERE {?s <%s> ?o. 
?o <%s> ?o1.}"; Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -89,8 +89,10 @@ protected long startTime; protected int limit = 1000; - private boolean timeout = true; + protected boolean timeout = true; + protected boolean forceSPARQL_1_0_Mode = false; + public AbstractAxiomLearningAlgorithm() { existingAxioms = new TreeSet<Axiom>(new AxiomComparator()); } @@ -137,6 +139,10 @@ public void setMaxFetchedRows(int maxFetchedRows) { this.maxFetchedRows = maxFetchedRows; } + + public void setForceSPARQL_1_0_Mode(boolean forceSPARQL_1_0_Mode) { + this.forceSPARQL_1_0_Mode = forceSPARQL_1_0_Mode; + } @Override public void start() { @@ -313,7 +319,7 @@ return entries; } - private long getRemainingRuntimeInMilliSeconds(){ + protected long getRemainingRuntimeInMilliSeconds(){ return Math.max(0, (maxExecutionTimeInSeconds * 1000) - (System.currentTimeMillis() - startTime)); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-03 18:04:44 UTC (rev 3687) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-03 20:59:33 UTC (rev 3688) @@ -19,6 +19,7 @@ package org.dllearner.reasoning; +import java.net.SocketTimeoutException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; @@ -74,6 +75,8 @@ import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; +import 
com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; +import com.hp.hpl.jena.sparql.resultset.ResultSetMem; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.OWL2; import com.hp.hpl.jena.vocabulary.RDF; @@ -166,6 +169,19 @@ } } + public int getSubjectCountForProperty(ObjectProperty p, long timeout){ + int cnt = -1; + String query = String.format( + "SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s <%s> ?o.}", + p.getName()); + ResultSet rs = executeSelectQuery(query, timeout); + if(rs.hasNext()){ + cnt = rs.next().getLiteral("cnt").getInt(); + } + + return cnt; + } + public int getPopularity(NamedClass nc){ if(classPopularityMap.containsKey(nc)){ return classPopularityMap.get(nc); @@ -182,7 +198,7 @@ } public int getPopularity(ObjectProperty op){ - if(objectPropertyPopularityMap.containsKey(op)){ + if(objectPropertyPopularityMap != null && objectPropertyPopularityMap.containsKey(op)){ return objectPropertyPopularityMap.get(op); } else { System.out.println("Cache miss: " + op); @@ -1005,6 +1021,33 @@ return rs; } + private ResultSet executeSelectQuery(String query, long timeout){ + logger.debug("Sending query \n {}", query); + ResultSet rs = null; + if(ks.isRemote()){ + SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), + query); + queryExecution.setTimeout(timeout); + queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); + try { + rs = queryExecution.execSelect(); + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout"); + } else { + logger.error("Exception executing query", e); + } + rs = new ResultSetMem(); + } + } else { + QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + rs = qExec.execSelect(); + } + return rs; + } + /** 
* Returns TRUE if the class hierarchy was computed before. * @return This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2012-05-03 19:14:48
|
Revision: 3687 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3687&view=rev Author: kurzum Date: 2012-05-03 18:04:44 +0000 (Thu, 03 May 2012) Log Message: ----------- added accuracy to output Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/server/Rest.java Modified: trunk/interfaces/src/main/java/org/dllearner/server/Rest.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2012-05-03 15:25:13 UTC (rev 3686) +++ trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2012-05-03 18:04:44 UTC (rev 3687) @@ -76,6 +76,7 @@ learningResult.put("manchester", manchester); learningResult.put("kbsyntax", "other syntax"); learningResult.put("sparql", sparql); + learningResult.put("accuracy", 1.0); learningResult.put("truePositives", "uri1, uri2"); learningResult.put("truePositives", "uri1, uri2"); learningResult.put("trueNegatives", "uri1, uri2"); @@ -91,6 +92,7 @@ learningResult.put("manchester", ed.getDescription().toManchesterSyntaxString(null, null)); learningResult.put("kbsyntax", ed.getDescription().toKBSyntaxString()); learningResult.put("sparql", sqd.getSparqlQuery(ed.getDescription())); + learningResult.put("accuracy", ed.getAccuracy()); learningResult.put("truePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getCoveredPositives())); learningResult.put("falsePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getNotCoveredPositives())); learningResult.put("trueNegatives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getNotCoveredNegatives())); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 15:25:23
|
Revision: 3686 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3686&view=rev Author: lorenz_b Date: 2012-05-03 15:25:13 +0000 (Thu, 03 May 2012) Log Message: ----------- Check for empty entities. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 15:12:01 UTC (rev 3685) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 15:25:13 UTC (rev 3686) @@ -376,55 +376,59 @@ // learner.setMaxExecutionTimeInSeconds(10); algName = AnnComponentManager.getName(learner); - int attempt = 0; - long startTime = 0; boolean emptyEntity = sparqlReasoner.getPopularity(property) == 0; if(emptyEntity){ logger.warn("Empty entity: " + property); } - boolean timeout = true; - while(!emptyEntity && ((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ - if(attempt > 1){ + + if(emptyEntity){ + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); + } else { + int attempt = 0; + long startTime = 0; + boolean timeout = true; + while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ + if(attempt > 1){ + try { + logger.warn("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); + Thread.sleep(delayInMilliseconds); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + logger.info("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); + startTime = System.currentTimeMillis(); try { - logger.warn("Got timeout. 
Waiting " + delayInMilliseconds + " ms ..."); - Thread.sleep(delayInMilliseconds); - } catch (InterruptedException e) { - e.printStackTrace(); + learner.start(); + timeout = false; + } catch (Exception e) { + if(e.getCause() instanceof SocketTimeoutException){ + + } else { + e.printStackTrace(); + } } } - logger.info("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); - startTime = System.currentTimeMillis(); - try { - learner.start(); - timeout = false; - } catch (Exception e) { - if(e.getCause() instanceof SocketTimeoutException){ - - } else { - e.printStackTrace(); + + long runTime = System.currentTimeMillis() - startTime; + List<EvaluatedAxiom> learnedAxioms = learner + .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); + if(timeout && learnedAxioms.isEmpty()){ + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); + } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); + } else { + for (EvaluatedAxiom learnedAxiom : learnedAxioms) { + double score = learnedAxiom.getScore().getAccuracy(); + if (Double.isNaN(score)) { + score = -1; + } + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes) .toString(), algName, learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes), + score, runTime, isEntailed(learnedAxiom)); } } } - long runTime = System.currentTimeMillis() - startTime; - List<EvaluatedAxiom> learnedAxioms = learner - .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); - if(emptyEntity){ - writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); - } else if(timeout && learnedAxioms.isEmpty()){ - writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); - } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { - 
writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); - } else { - for (EvaluatedAxiom learnedAxiom : learnedAxioms) { - double score = learnedAxiom.getScore().getAccuracy(); - if (Double.isNaN(score)) { - score = -1; - } - writeToDB(property.toManchesterSyntaxString(baseURI, prefixes) .toString(), algName, learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes), - score, runTime, isEntailed(learnedAxiom)); - } - } objectProperties++; if (maxObjectProperties != 0 && objectProperties == maxObjectProperties) { break; @@ -464,52 +468,54 @@ if(emptyEntity){ logger.warn("Empty entity: " + property); } - - int attempt = 0; - long startTime = 0; - boolean timeout = true; - while(!emptyEntity && ((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ - if(attempt > 1){ + if(emptyEntity){ + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); + } else { + int attempt = 0; + long startTime = 0; + boolean timeout = true; + while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ + if(attempt > 1){ + try { + logger.warn("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); + Thread.sleep(delayInMilliseconds); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + logger.info("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); + startTime = System.currentTimeMillis(); try { - logger.warn("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); - Thread.sleep(delayInMilliseconds); - } catch (InterruptedException e) { - e.printStackTrace(); + learner.start(); + timeout = false; + } catch (Exception e) { + if(e.getCause() instanceof SocketTimeoutException){ + + } else { + e.printStackTrace(); + } } } - logger.info("Applying " + algName + " on " + property + " ... 
(Attempt " + attempt + ")"); - startTime = System.currentTimeMillis(); - try { - learner.start(); - timeout = false; - } catch (Exception e) { - if(e.getCause() instanceof SocketTimeoutException){ - - } else { - e.printStackTrace(); + + long runTime = System.currentTimeMillis() - startTime; + List<EvaluatedAxiom> learnedAxioms = learner + .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); + if(timeout && learnedAxioms.isEmpty()){ + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); + } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); + } else { + for (EvaluatedAxiom learnedAxiom : learnedAxioms) { + double score = learnedAxiom.getScore().getAccuracy(); + if (Double.isNaN(score)) { + score = -1; + } + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes) .toString(), algName, learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes), + score, runTime, isEntailed(learnedAxiom)); } } } - long runTime = System.currentTimeMillis() - startTime; - List<EvaluatedAxiom> learnedAxioms = learner - .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); - if(emptyEntity){ - writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); - } else if(timeout && learnedAxioms.isEmpty()){ - writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); - } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { - writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); - } else { - for (EvaluatedAxiom learnedAxiom : learnedAxioms) { - double score = learnedAxiom.getScore().getAccuracy(); - if (Double.isNaN(score)) { - score = -1; - } - writeToDB(property.toManchesterSyntaxString(baseURI, prefixes) .toString(), algName, 
learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes), - score, runTime, isEntailed(learnedAxiom)); - } - } dataProperties++; if (maxDataProperties != 0 && dataProperties == maxDataProperties) { break; @@ -531,77 +537,81 @@ for (NamedClass cls : classes) { try{ - List<EvaluatedAxiom> learnedAxioms = null; - boolean emptyEntity = sparqlReasoner.getPopularity(cls) == 0; - if(emptyEntity){ - logger.warn("Empty entity: " + cls); - } - long startTime = System.currentTimeMillis(); - boolean timeout = false; - String algName; + String algName = ""; if(algorithmClass == CELOE.class){ algName = CELOE.class.getAnnotation(ComponentAnn.class).name(); - logger.info("Applying " + algName + " on " + cls + " ... "); - if(!emptyEntity){ - learnedAxioms = applyCELOE(ks, cls, false); - } } else { - - // dynamically invoke constructor with SPARQL knowledge source LearningAlgorithm learner = algorithmClass.getConstructor( SparqlEndpointKS.class).newInstance(ks); - ((AbstractAxiomLearningAlgorithm)learner).setReasoner(sparqlReasoner); - ConfigHelper.configure(learner, "classToDescribe", cls.toString()); - ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", - maxExecutionTimeInSeconds); - learner.init(); - // learner.setPropertyToDescribe(property); - // learner.setMaxExecutionTimeInSeconds(10); algName = AnnComponentManager.getName(learner); - int attempt = 0; - - timeout = true; - while(!emptyEntity && ((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ - if(attempt > 1){ + } + List<EvaluatedAxiom> learnedAxioms = new ArrayList<EvaluatedAxiom>(); + boolean emptyEntity = sparqlReasoner.getPopularity(cls) == 0; + if(emptyEntity){ + logger.warn("Empty entity: " + cls); + writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); + } else { + long startTime = System.currentTimeMillis(); + boolean timeout = false; + if(algorithmClass == CELOE.class){ + logger.info("Applying " + algName + " on " + 
cls + " ... "); + learnedAxioms = applyCELOE(ks, cls, false); + } else { + + // dynamically invoke constructor with SPARQL knowledge source + LearningAlgorithm learner = algorithmClass.getConstructor( + SparqlEndpointKS.class).newInstance(ks); + ((AbstractAxiomLearningAlgorithm)learner).setReasoner(sparqlReasoner); + ConfigHelper.configure(learner, "classToDescribe", cls.toString()); + ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", + maxExecutionTimeInSeconds); + learner.init(); + // learner.setPropertyToDescribe(property); + // learner.setMaxExecutionTimeInSeconds(10); + int attempt = 0; + + timeout = true; + while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ + if(attempt > 1){ + try { + logger.warn("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); + Thread.sleep(delayInMilliseconds); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + logger.info("Applying " + algName + " on " + cls + " ... (Attempt " + attempt + ")"); + startTime = System.currentTimeMillis(); try { - logger.warn("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); - Thread.sleep(delayInMilliseconds); - } catch (InterruptedException e) { + learner.start(); + timeout = false; + } catch (Exception e) { e.printStackTrace(); } } - logger.info("Applying " + algName + " on " + cls + " ... 
(Attempt " + attempt + ")"); - startTime = System.currentTimeMillis(); - try { - learner.start(); - timeout = false; - } catch (Exception e) { - e.printStackTrace(); + learnedAxioms = ((AxiomLearningAlgorithm)learner).getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); + } + + + long runTime = System.currentTimeMillis() - startTime; + + if(timeout && learnedAxioms.isEmpty()){ + writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); + } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { + writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); + } else { + for (EvaluatedAxiom learnedAxiom : learnedAxioms) { + double score = learnedAxiom.getScore().getAccuracy(); + if (Double.isNaN(score)) { + score = -1; + } + writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes) .toString(), algName, learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes), + score, runTime, isEntailed(learnedAxiom)); } } - learnedAxioms = ((AxiomLearningAlgorithm)learner).getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); } - long runTime = System.currentTimeMillis() - startTime; - - if(emptyEntity){ - writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); - } else if(timeout && learnedAxioms.isEmpty()){ - writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); - } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { - writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); - } else { - for (EvaluatedAxiom learnedAxiom : learnedAxioms) { - double score = learnedAxiom.getScore().getAccuracy(); - if (Double.isNaN(score)) { - score = -1; - } - writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes) .toString(), algName, learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes), - score, runTime, isEntailed(learnedAxiom)); - } - } - classesCnt++; if 
(maxClasses != 0 && classesCnt == maxClasses) { break; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 15:12:10
|
Revision: 3685 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3685&view=rev Author: lorenz_b Date: 2012-05-03 15:12:01 +0000 (Thu, 03 May 2012) Log Message: ----------- Check for empty entities. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 14:43:04 UTC (rev 3684) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 15:12:01 UTC (rev 3685) @@ -378,8 +378,12 @@ int attempt = 0; long startTime = 0; + boolean emptyEntity = sparqlReasoner.getPopularity(property) == 0; + if(emptyEntity){ + logger.warn("Empty entity: " + property); + } boolean timeout = true; - while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ + while(!emptyEntity && ((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { logger.warn("Got timeout. 
Waiting " + delayInMilliseconds + " ms ..."); @@ -405,7 +409,9 @@ long runTime = System.currentTimeMillis() - startTime; List<EvaluatedAxiom> learnedAxioms = learner .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); - if(timeout && learnedAxioms.isEmpty()){ + if(emptyEntity){ + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); + } else if(timeout && learnedAxioms.isEmpty()){ writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); @@ -454,10 +460,15 @@ // learner.setMaxExecutionTimeInSeconds(10); algName = AnnComponentManager.getName(learner); + boolean emptyEntity = sparqlReasoner.getPopularity(property) == 0; + if(emptyEntity){ + logger.warn("Empty entity: " + property); + } + int attempt = 0; long startTime = 0; boolean timeout = true; - while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ + while(!emptyEntity && ((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { logger.warn("Got timeout. 
Waiting " + delayInMilliseconds + " ms ..."); @@ -483,7 +494,9 @@ long runTime = System.currentTimeMillis() - startTime; List<EvaluatedAxiom> learnedAxioms = learner .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn); - if(timeout && learnedAxioms.isEmpty()){ + if(emptyEntity){ + writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); + } else if(timeout && learnedAxioms.isEmpty()){ writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); @@ -519,13 +532,19 @@ try{ List<EvaluatedAxiom> learnedAxioms = null; + boolean emptyEntity = sparqlReasoner.getPopularity(cls) == 0; + if(emptyEntity){ + logger.warn("Empty entity: " + cls); + } long startTime = System.currentTimeMillis(); boolean timeout = false; String algName; if(algorithmClass == CELOE.class){ algName = CELOE.class.getAnnotation(ComponentAnn.class).name(); logger.info("Applying " + algName + " on " + cls + " ... "); - learnedAxioms = applyCELOE(ks, cls, false); + if(!emptyEntity){ + learnedAxioms = applyCELOE(ks, cls, false); + } } else { // dynamically invoke constructor with SPARQL knowledge source @@ -542,7 +561,7 @@ int attempt = 0; timeout = true; - while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ + while(!emptyEntity && ((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { logger.warn("Got timeout. 
Waiting " + delayInMilliseconds + " ms ..."); @@ -566,7 +585,9 @@ long runTime = System.currentTimeMillis() - startTime; - if(timeout && learnedAxioms.isEmpty()){ + if(emptyEntity){ + writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false); + } else if(timeout && learnedAxioms.isEmpty()){ writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false); } else if (learnedAxioms == null || learnedAxioms.isEmpty()) { writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 14:43:15
|
Revision: 3684 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3684&view=rev Author: lorenz_b Date: 2012-05-03 14:43:04 +0000 (Thu, 03 May 2012) Log Message: ----------- Modified eval script. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 14:42:31 UTC (rev 3683) +++ trunk/scripts/src/main/java/org/dllearner/scripts/evaluation/EnrichmentEvaluation.java 2012-05-03 14:43:04 UTC (rev 3684) @@ -71,6 +71,7 @@ import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.compressors.CompressorStreamFactory; +import org.apache.log4j.ConsoleAppender; import org.apache.log4j.FileAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; @@ -137,18 +138,18 @@ import org.dllearner.utilities.owl.OWLAPIConverter; import org.ini4j.IniPreferences; import org.ini4j.InvalidFileFormatException; -import org.semanticweb.HermiT.Configuration; -import org.semanticweb.HermiT.Reasoner; -import org.semanticweb.HermiT.examples.HermiTConfigurations; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLDataProperty; import org.semanticweb.owlapi.model.OWLDataPropertyAxiom; import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; import org.semanticweb.owlapi.model.OWLEntity; +import 
org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; @@ -332,35 +333,33 @@ sparqlReasoner.setCache(new ExtractionDBCache("cache")); sparqlReasoner.setUseCache(true); sparqlReasoner.prepareSubsumptionHierarchy(); - sparqlReasoner.precomputeClassPopularity(); + sparqlReasoner.precomputePopularity(); - Thread.sleep(20000); if(runClassAlgorithms){ evaluateClasses(ks); + Thread.sleep(20000); } - Thread.sleep(20000); - if(runObjectPropertyAlgorithms){ evaluateObjectProperties(ks); + Thread.sleep(20000); } - Thread.sleep(20000); - if(runDataPropertyAlgorithms){ evaluateDataProperties(ks); } - System.out.println("Overall runtime: " + (System.currentTimeMillis()-overallStartTime)/1000 + "s."); + logger.info("Overall runtime: " + (System.currentTimeMillis()-overallStartTime)/1000 + "s."); } private void evaluateObjectProperties(SparqlEndpointKS ks)throws IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, ComponentInitException, InterruptedException{ Set<ObjectProperty> properties = new SPARQLTasks(ks.getEndpoint()).getAllObjectProperties(); + logger.info("Evaluating " + properties.size() + " object properties..."); for (Class<? extends AxiomLearningAlgorithm> algorithmClass : objectPropertyAlgorithms) { int objectProperties = 0; - Thread.sleep(10000); + Thread.sleep(5000); String algName = ""; for (ObjectProperty property : properties) { @@ -383,13 +382,13 @@ while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { - System.out.println("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); + logger.warn("Got timeout. 
Waiting " + delayInMilliseconds + " ms ..."); Thread.sleep(delayInMilliseconds); } catch (InterruptedException e) { e.printStackTrace(); } } - System.out.println("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); + logger.info("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); startTime = System.currentTimeMillis(); try { learner.start(); @@ -435,10 +434,10 @@ private void evaluateDataProperties(SparqlEndpointKS ks) throws IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, ComponentInitException, InterruptedException{ Set<DatatypeProperty> properties = new SPARQLTasks(ks.getEndpoint()).getAllDataProperties(); - + logger.info("Evaluating " + properties.size() + " data properties..."); for (Class<? extends AxiomLearningAlgorithm> algorithmClass : dataPropertyAlgorithms) { int dataProperties = 0; - Thread.sleep(10000); + Thread.sleep(5000); String algName = ""; for (DatatypeProperty property : properties) { Thread.sleep(1000); @@ -461,12 +460,13 @@ while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { + logger.warn("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); Thread.sleep(delayInMilliseconds); } catch (InterruptedException e) { e.printStackTrace(); } } - System.out.println("Applying " + algName + " on " + property + " ... (Attempt " + attempt + ")"); + logger.info("Applying " + algName + " on " + property + " ... 
(Attempt " + attempt + ")"); startTime = System.currentTimeMillis(); try { learner.start(); @@ -511,10 +511,10 @@ private void evaluateClasses(SparqlEndpointKS ks) throws IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, ComponentInitException, InterruptedException{ Set<NamedClass> classes = new SPARQLTasks(ks.getEndpoint()).getAllClasses(); - + logger.info("Evaluating " + classes.size() + " classes..."); for (Class<? extends LearningAlgorithm> algorithmClass : classAlgorithms) { int classesCnt = 0; - Thread.sleep(10000); + Thread.sleep(5000); for (NamedClass cls : classes) { try{ @@ -524,7 +524,7 @@ String algName; if(algorithmClass == CELOE.class){ algName = CELOE.class.getAnnotation(ComponentAnn.class).name(); - System.out.println("Applying " + algName + " on " + cls + " ... "); + logger.info("Applying " + algName + " on " + cls + " ... "); learnedAxioms = applyCELOE(ks, cls, false); } else { @@ -545,12 +545,13 @@ while(((AbstractAxiomLearningAlgorithm)learner).isTimeout() && attempt++ < maxAttempts){ if(attempt > 1){ try { + logger.warn("Got timeout. Waiting " + delayInMilliseconds + " ms ..."); Thread.sleep(delayInMilliseconds); } catch (InterruptedException e) { e.printStackTrace(); } } - System.out.println("Applying " + algName + " on " + cls + " ... (Attempt " + attempt + ")"); + logger.info("Applying " + algName + " on " + cls + " ... 
(Attempt " + attempt + ")"); startTime = System.currentTimeMillis(); try { learner.start(); @@ -783,16 +784,22 @@ // get all entities in database because we compute recall only for axioms of entities which we have tested // we use only entities for which triples in the endpoint are contained java.sql.ResultSet rs = conn.prepareStatement("SELECT DISTINCT entity FROM evaluation WHERE axiom != 'NULL'").executeQuery(); - Set<OWLEntity> entities = new HashSet<OWLEntity>(); + Set<OWLEntity> allEntities = new HashSet<OWLEntity>(); + Set<OWLEntity> classes = new HashSet<OWLEntity>(); + Set<OWLEntity> objectProperties = new HashSet<OWLEntity>(); + Set<OWLEntity> dataProperties = new HashSet<OWLEntity>(); IRI iri; while(rs.next()){ iri = IRI.create("http://dbpedia.org/ontology/" + rs.getString(1).substring(4)); if(dbPediaOntology.containsClassInSignature(iri)){ - entities.add(factory.getOWLClass(iri)); + allEntities.add(factory.getOWLClass(iri)); + classes.add(factory.getOWLClass(iri)); } else if(dbPediaOntology.containsObjectPropertyInSignature(iri)){ - entities.add(factory.getOWLObjectProperty(iri)); + allEntities.add(factory.getOWLObjectProperty(iri)); + objectProperties.add(factory.getOWLObjectProperty(iri)); } else if(dbPediaOntology.containsDataPropertyInSignature(iri)){ - entities.add(factory.getOWLDataProperty(iri)); + allEntities.add(factory.getOWLDataProperty(iri)); + dataProperties.add(factory.getOWLDataProperty(iri)); } } @@ -800,9 +807,17 @@ //compute recall for each axiom type ps = conn.prepareStatement("SELECT axiom, entailed, score FROM evaluation WHERE algorithm=? AND score>=?"); + Set<OWLEntity> entities = null; for(Entry<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>> entry : axiomType2Algorithm.entrySet()){ AxiomType<? 
extends OWLAxiom> type = entry.getKey(); algorithms = entry.getValue(); + if(classAlgorithms.containsAll(algorithms)){ + entities = classes; + } else if(objectPropertyAlgorithms.containsAll(algorithms)){ + entities = objectProperties; + } else if(dataPropertyAlgorithms.containsAll(algorithms)){ + entities = dataProperties; + } ps.setString(1, algorithms.get(0).getAnnotation(ComponentAnn.class).name()); ps.setDouble(2, threshold); @@ -826,10 +841,9 @@ } //get all axioms in the reference ontology for a specific axiom type - Set<String> relevantAxioms = getRelevantAxioms(type, entities); + Set<String> relevantAxioms = getRelevantAxioms2(type, entities); //compute the axioms which are in the reference ontology, but not be computed by the learning algorithm Set<String> missedAxioms = org.mindswap.pellet.utils.SetUtils.difference(relevantAxioms, foundAxioms); - System.out.println(missedAxioms); //compute the additional found axioms which were not entailed for(String relAxiom : relevantAxioms){ foundAndNotEntailedAxioms.remove(relAxiom); @@ -956,8 +970,8 @@ private Map<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>> getAxiomTypesWithLearningAlgorithms(){ Map<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>> axiomType2Algorithm = new LinkedHashMap<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>>(); - axiomType2Algorithm.put(AxiomType.SUBCLASS_OF, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{SimpleSubclassLearner.class, CELOE.class})); - axiomType2Algorithm.put(AxiomType.EQUIVALENT_CLASSES, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{CELOE.class})); + axiomType2Algorithm.put(AxiomType.SUBCLASS_OF, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{SimpleSubclassLearner.class}));//, CELOE.class})); +// axiomType2Algorithm.put(AxiomType.EQUIVALENT_CLASSES, Arrays.asList((Class<? 
extends LearningAlgorithm>[])new Class[]{CELOE.class})); axiomType2Algorithm.put(AxiomType.DISJOINT_CLASSES, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{DisjointClassesLearner.class})); axiomType2Algorithm.put(AxiomType.SUB_OBJECT_PROPERTY, Arrays.asList((Class<? extends LearningAlgorithm>[])new Class[]{SubObjectPropertyOfAxiomLearner.class})); @@ -994,6 +1008,31 @@ return relevantAxioms; } + private Set<String> getRelevantAxioms2(AxiomType<? extends OWLAxiom> axiomType, Set<OWLEntity> entities){ + Set<String> relevantAxioms = new HashSet<String>(); + if(entities.isEmpty()){ + return relevantAxioms; + } + Set<OWLAxiom> entityAxioms = new HashSet<OWLAxiom>(); + for(OWLEntity entity : entities){ + if(entity.isOWLDataProperty()){ + entityAxioms.addAll(dbPediaOntology.getAxioms((OWLDataProperty)entity)); + } else if(entity.isOWLObjectProperty()){ + entityAxioms.addAll(dbPediaOntology.getAxioms((OWLObjectProperty)entity)); + } else if(entity.isOWLClass()){ + entityAxioms.addAll(dbPediaOntology.getAxioms((OWLClass)entity)); + } + } + + for(OWLAxiom axiom : entityAxioms){ + if(axiom.getAxiomType() == axiomType && !axiom.getClassesInSignature().contains(factory.getOWLThing())){ + String axiomString = DLLearnerAxiomConvertVisitor.getDLLearnerAxiom(axiom).toManchesterSyntaxString(baseURI, prefixes); + relevantAxioms.add(axiomString); + } + } + return relevantAxioms; + } + private boolean isRelevantAxiom(OWLAxiom axiom, Set<OWLEntity> entities){ if(axiom instanceof OWLObjectPropertyAxiom){ return containsOneOf(axiom.getObjectPropertiesInSignature(), entities); @@ -1139,10 +1178,10 @@ } catch (OWLOntologyCreationException e1) { e1.printStackTrace(); } catch (FileNotFoundException e1) { - e1.printStackTrace(); + } if(dbPediaOntology == null){ - System.out.println("Loading schema ..."); + logger.info("Loading schema ..."); SPARQLReasoner r = new SPARQLReasoner(new SparqlEndpointKS(endpoint)); dbPediaOntology = convert(r.loadSchema()); try { @@ -1157,12 
+1196,14 @@ e.printStackTrace(); } } - System.out.println("Preparing reasoner ..."); + + logger.info("Preparing reasoner ..."); // Configuration conf = new Configuration(); // conf.ignoreUnsupportedDatatypes = true; // reasoner = new Reasoner(conf, dbPediaOntology); reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(dbPediaOntology); reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); + logger.info("done."); } private OWLOntology convert(Model model){ @@ -1178,9 +1219,24 @@ return ontology; } - public static void main(String[] args) throws Exception - - { + public static void main(String[] args) throws Exception { + Logger.getRootLogger().setLevel(Level.INFO); + Logger.getRootLogger().removeAllAppenders(); + + FileAppender app = new FileAppender(new SimpleLayout(), + "evaluation/errors.log"); + app.setThreshold(Level.ERROR); + Logger.getRootLogger().addAppender(app); + + FileAppender app2 = new FileAppender(new SimpleLayout(), + "evaluation/enrichment.log"); + app2.setThreshold(Level.INFO); + Logger.getRootLogger().addAppender(app); + + ConsoleAppender consApp = new ConsoleAppender(new SimpleLayout()); + Logger.getRootLogger().addAppender(consApp); + + OptionParser parser = new OptionParser(); parser.acceptsAll(asList("h", "?", "help"), "Show help."); parser.acceptsAll(asList("e", "endpoint"), @@ -1266,11 +1322,9 @@ ee.printResultsLaTeX(); Files.createFile(new File("enrichment_eval.html"), ee.printHTMLTable()); - FileAppender app = new FileAppender(new SimpleLayout(), - "log/enrichmentEvalErrors.log"); - Logger.getRootLogger().setLevel(Level.ERROR); - Logger.getRootLogger().removeAllAppenders(); - Logger.getRootLogger().addAppender(app); + + + } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-05-03 14:42:43
|
Revision: 3683 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3683&view=rev Author: lorenz_b Date: 2012-05-03 14:42:31 +0000 (Thu, 03 May 2012) Log Message: ----------- Some improvements in algorithms. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-05-03 14:42:31 UTC (rev 
3683) @@ -210,7 +210,7 @@ private void runSPARQL1_1_Mode(){ int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?type (COUNT(?s) AS ?count) WHERE {?s a ?type." + + String queryTemplate = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?type (COUNT(?s) AS ?count) WHERE {?s a ?type. ?type a owl:Class" + "{SELECT ?s WHERE {?s a <%s>.} LIMIT %d OFFSET %d} " + "} GROUP BY ?type"; String query; @@ -220,7 +220,7 @@ boolean repeat = true; while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, classToDescribe, limit, offset); + query = String.format(queryTemplate, classToDescribe, limit, offset);System.out.println(query); ResultSet rs = executeSelectQuery(query); QuerySolution qs; repeat = false; @@ -350,8 +350,9 @@ } //secondly, create disjoint classexpressions with score 1 - (#occurence/#all) + NamedClass cls; for (Entry<NamedClass, Integer> entry : sortByValues(class2Count)) { - NamedClass cls = entry.getKey(); + cls = entry.getKey(); // drop classes from OWL and RDF namespace if (cls.getName().startsWith(OWL2.getURI()) || cls.getName().startsWith(RDF.getURI())) continue; @@ -386,15 +387,6 @@ return evalDescs; } - private double accuracy(int total, int success){ - double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(total, success); - return (confidenceInterval[0] + confidenceInterval[1]) / 2; - } - - private double fMEasure(double precision, double recall){ - return 2 * precision * recall / (precision + recall); - } - private void keepMostGeneralClasses(Set<NamedClass> classes){ if(ks.isRemote()){ if(reasoner.isPrepared()){ Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 
2012-05-03 14:42:31 UTC (rev 3683) @@ -170,7 +170,7 @@ boolean notEmpty = false; String query; if(ks.supportsSPARQL_1_1()){ - query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. {SELECT ?ind {?ind a <%s>} LIMIT %d OFFSET %d}}", classToDescribe.getName(), limit, offset); + query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT DISTINCT ?ind ?type WHERE {?ind a ?type.?type a owl:Class. {SELECT ?ind {?ind a <%s>} LIMIT %d OFFSET %d}}", classToDescribe.getName(), limit, offset); } else { query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a <%s>. ?ind a ?type} LIMIT %d OFFSET %d", classToDescribe.getName(), limit, offset); } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DataPropertyDomainAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -138,7 +138,7 @@ } private int addIndividualsWithTypes(Map<Individual, SortedSet<Description>> ind2Types, int limit, int offset){ - String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind <%s> ?o. ?ind a ?type} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); + String query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT DISTINCT ?ind ?type WHERE {?ind <%s> ?o. ?ind a ?type. ?type a owl:Class} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); // String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. 
{SELECT ?ind {?ind <%s> ?o.} LIMIT %d OFFSET %d}}", propertyToDescribe.getName(), limit, offset); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointDataPropertyAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -25,6 +25,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.TreeSet; import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; @@ -33,6 +34,7 @@ import org.dllearner.core.config.DataPropertyEditor; import org.dllearner.core.owl.DatatypeProperty; import org.dllearner.core.owl.DisjointDatatypePropertyAxiom; +import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; @@ -44,6 +46,7 @@ import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.RDFNode; @ComponentAnn(name="disjoint dataproperty axiom learner", shortName="dpldisjoint", version=0.1) public class DisjointDataPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -55,6 +58,10 @@ private Set<DatatypeProperty> allDataProperties; + private boolean usePropertyPopularity = true; + + private int popularity; + public DisjointDataPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; } @@ -74,8 +81,14 @@ fetchedRows = 0; currentlyBestAxioms = new ArrayList<EvaluatedAxiom>(); - //TODO + // we return here if the class contains no instances + popularity = reasoner.getPopularity(propertyToDescribe); + if (popularity == 
0) { + return; + } + //TODO detect existing axioms + //at first get all existing dataproperties in knowledgebase allDataProperties = new SPARQLTasks(ks.getEndpoint()).getAllDataProperties(); allDataProperties.remove(propertyToDescribe); @@ -132,7 +145,7 @@ //get properties and how often they occur int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p (COUNT(?s) as ?count) WHERE {?s ?p ?o." + + String queryTemplate = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?p (COUNT(?s) as ?count) WHERE {?p a owl:DatatypeProperty. ?s ?p ?o." + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; @@ -178,19 +191,62 @@ Integer all = property2Count.get(propertyToDescribe); property2Count.remove(propertyToDescribe); + //get complete disjoint properties + Set<DatatypeProperty> completeDisjointProperties = new TreeSet<DatatypeProperty>(allProperties); + completeDisjointProperties.removeAll(property2Count.keySet()); + EvaluatedAxiom evalAxiom; //first create disjoint axioms with properties which not occur and give score of 1 - for(DatatypeProperty p : allProperties){ - evalAxiom = new EvaluatedAxiom(new DisjointDatatypePropertyAxiom(propertyToDescribe, p), - new AxiomScore(1)); + for(DatatypeProperty p : completeDisjointProperties){ + if(usePropertyPopularity){ + int overlap = 0; + int pop; + if(ks.isRemote()){ + pop = reasoner.getPopularity(p); + } else { + Model model = ((LocalModelBasedSparqlEndpointKS)ks).getModel(); + pop = model.listStatements(null, model.getProperty(p.getName()), (RDFNode)null).toSet().size(); + } + //we skip classes with no instances + if(pop == 0) continue; + + //we compute the estimated precision + double precision = accuracy(pop, overlap); + //we compute the estimated recall + double recall = accuracy(popularity, overlap); + //compute the overall score + double score = 1 - fMEasure(precision, recall); + + evalAxiom = new EvaluatedAxiom(new DisjointDatatypePropertyAxiom(propertyToDescribe, p), new 
AxiomScore(score)); + } else { + evalAxiom = new EvaluatedAxiom(new DisjointDatatypePropertyAxiom(propertyToDescribe, p), new AxiomScore(1)); + } axioms.add(evalAxiom); } //second create disjoint axioms with other properties and score 1 - (#occurence/#all) + DatatypeProperty p; for(Entry<DatatypeProperty, Integer> entry : sortByValues(property2Count)){ - evalAxiom = new EvaluatedAxiom(new DisjointDatatypePropertyAxiom(propertyToDescribe, entry.getKey()), - new AxiomScore(1 - (entry.getValue() / (double)all))); - axioms.add(evalAxiom); + p = entry.getKey(); + int overlap = entry.getValue(); + int pop; + if(ks.isRemote()){ + pop = reasoner.getPopularity(p); + } else { + Model model = ((LocalModelBasedSparqlEndpointKS)ks).getModel(); + pop = model.listStatements(null, model.getProperty(p.getName()), (RDFNode)null).toSet().size(); + } + //we skip classes with no instances + if(pop == 0) continue; + + //we compute the estimated precision + double precision = accuracy(pop, overlap); + //we compute the estimated recall + double recall = accuracy(popularity, overlap); + //compute the overall score + double score = 1 - fMEasure(precision, recall); + + evalAxiom = new EvaluatedAxiom(new DisjointDatatypePropertyAxiom(propertyToDescribe, p), new AxiomScore(score)); } property2Count.put(propertyToDescribe, all); @@ -202,6 +258,7 @@ l.setPropertyToDescribe(new DatatypeProperty("http://dbpedia.org/ontology/position")); l.setMaxExecutionTimeInSeconds(20); l.init(); + l.getReasoner().precomputeDataPropertyPopularity(); l.start(); System.out.println(l.getCurrentlyBestEvaluatedAxioms(5)); } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ 
trunk/components-core/src/main/java/org/dllearner/algorithms/properties/DisjointObjectPropertyAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -19,9 +19,7 @@ package org.dllearner.algorithms.properties; -import java.net.URL; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -32,17 +30,15 @@ import org.dllearner.core.AbstractAxiomLearningAlgorithm; import org.dllearner.core.ComponentAnn; import org.dllearner.core.EvaluatedAxiom; -import org.dllearner.core.EvaluatedDescription; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ObjectPropertyEditor; import org.dllearner.core.owl.DisjointObjectPropertyAxiom; -import org.dllearner.core.owl.FunctionalObjectPropertyAxiom; import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.AxiomScore; -import org.dllearner.learningproblems.Heuristics; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,6 +46,7 @@ import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.RDFNode; @ComponentAnn(name="disjoint objectproperty axiom learner", shortName="opldisjoint", version=0.1) public class DisjointObjectPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm { @@ -63,6 +60,8 @@ private boolean usePropertyPopularity = true; + private int popularity; + public DisjointObjectPropertyAxiomLearner(SparqlEndpointKS ks){ this.ks = ks; } @@ -82,9 +81,16 @@ fetchedRows = 0; currentlyBestAxioms = new ArrayList<EvaluatedAxiom>(); + //we return here if the class contains no instances + popularity = reasoner.getPopularity(propertyToDescribe); + if(popularity == 0){ + return; + } + //TODO detect 
existing axioms - //at first get all existing objectproperties in knowledgebase + + //at first get all existing objectproperties in knowledge base allObjectProperties = new SPARQLTasks(ks.getEndpoint()).getAllObjectProperties(); allObjectProperties.remove(propertyToDescribe); @@ -139,7 +145,7 @@ private void runSPARQL1_1_Mode() { //get properties and how often they occur int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + + String queryTemplate = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?p COUNT(?s) AS ?count WHERE {?p a owl:ObjectProperty. ?s ?p ?o." + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; @@ -187,12 +193,25 @@ //first create disjoint axioms with properties which not occur and give score of 1 for(ObjectProperty p : completeDisjointProperties){ if(usePropertyPopularity){ - int popularity = reasoner.getPropertyCount(p); - //skip if property is not used in kb - if(popularity == 0) continue; - double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(popularity, 0); - double accuracy = (confidenceInterval[0] + confidenceInterval[1]) / 2; - evalAxiom = new EvaluatedAxiom(new DisjointObjectPropertyAxiom(propertyToDescribe, p), new AxiomScore(1- accuracy)); + int overlap = 0; + int pop; + if(ks.isRemote()){ + pop = reasoner.getPopularity(p); + } else { + Model model = ((LocalModelBasedSparqlEndpointKS)ks).getModel(); + pop = model.listStatements(null, model.getProperty(p.getName()), (RDFNode)null).toSet().size(); + } + //we skip classes with no instances + if(pop == 0) continue; + + //we compute the estimated precision + double precision = accuracy(pop, overlap); + //we compute the estimated recall + double recall = accuracy(popularity, overlap); + //compute the overall score + double score = 1 - fMEasure(precision, recall); + + evalAxiom = new EvaluatedAxiom(new DisjointObjectPropertyAxiom(propertyToDescribe, p), new AxiomScore(score)); } else { evalAxiom = new 
EvaluatedAxiom(new DisjointObjectPropertyAxiom(propertyToDescribe, p), new AxiomScore(1)); } @@ -200,12 +219,28 @@ } //second create disjoint axioms with other properties and score 1 - (#occurence/#all) + ObjectProperty p; for(Entry<ObjectProperty, Integer> entry : sortByValues(property2Count)){ - double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(all, entry.getValue()); - double accuracy = (confidenceInterval[0] + confidenceInterval[1]) / 2;//System.out.println(entry + ": " + accuracy); - evalAxiom = new EvaluatedAxiom(new DisjointObjectPropertyAxiom(propertyToDescribe, entry.getKey()), - new AxiomScore(1 - accuracy)); - axioms.add(evalAxiom); + p = entry.getKey(); + int overlap = entry.getValue(); + int pop; + if(ks.isRemote()){ + pop = reasoner.getPopularity(p); + } else { + Model model = ((LocalModelBasedSparqlEndpointKS)ks).getModel(); + pop = model.listStatements(null, model.getProperty(p.getName()), (RDFNode)null).toSet().size(); + } + //we skip classes with no instances + if(pop == 0) continue; + + //we compute the estimated precision + double precision = accuracy(pop, overlap); + //we compute the estimated recall + double recall = accuracy(popularity, overlap); + //compute the overall score + double score = 1 - fMEasure(precision, recall); + + evalAxiom = new EvaluatedAxiom(new DisjointObjectPropertyAxiom(propertyToDescribe, p), new AxiomScore(score)); } property2Count.put(propertyToDescribe, all); @@ -216,9 +251,10 @@ SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); // endpoint = new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())); DisjointObjectPropertyAxiomLearner l = new DisjointObjectPropertyAxiomLearner(new SparqlEndpointKS(endpoint));//.getEndpointDBpediaLiveAKSW())); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/stateOfOrigin")); + l.setPropertyToDescribe(new 
ObjectProperty("http://dbpedia.org/ontology/birthPlace")); l.setMaxExecutionTimeInSeconds(10); l.init(); + l.getReasoner().precomputeObjectPropertyPopularity(); l.start(); for(EvaluatedAxiom ax : l.getCurrentlyBestEvaluatedAxioms(Integer.MAX_VALUE)){ System.out.println(ax); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentDataPropertyAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -126,7 +126,7 @@ // get subjects with types int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + String queryTemplate = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o.?p a owl:DatatypeProperty." + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -129,7 +129,7 @@ private void runSPARQL1_1_Mode() { //get subjects with types int offset = 0; - String queryTemplate = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o." 
+ + String queryTemplate = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.?p a owl:ObjectProperty." + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -139,7 +139,7 @@ } private int addIndividualsWithTypes(Map<Individual, SortedSet<Description>> ind2Types, int limit, int offset){ - String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind <%s> ?o. ?ind a ?type} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); + String query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT DISTINCT ?ind ?type WHERE {?ind <%s> ?o. ?ind a ?type. ?type a owl:Class} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); // String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. 
{SELECT ?ind {?ind <%s> ?o.} LIMIT %d OFFSET %d}}", propertyToDescribe.getName(), limit, offset); @@ -176,7 +176,7 @@ } public static void main(String[] args) throws Exception{ - SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql")));//.getEndpointDBpediaLiveAKSW())); + SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia()); SPARQLReasoner reasoner = new SPARQLReasoner(ks); reasoner.prepareSubsumptionHierarchy(); @@ -184,7 +184,7 @@ ObjectPropertyDomainAxiomLearner l = new ObjectPropertyDomainAxiomLearner(ks); l.setReasoner(reasoner); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/officialLanguage")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/Automobile/fuelCapacity")); l.setMaxExecutionTimeInSeconds(10); // l.setReturnOnlyNewAxioms(true); l.init(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyRangeAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -77,13 +77,13 @@ if(reasoner.isPrepared()){ //get existing ranges - Description existingDomain = reasoner.getRange(propertyToDescribe); - if(existingDomain != null){ - existingAxioms.add(new ObjectPropertyRangeAxiom(propertyToDescribe, existingDomain)); + Description existingRange = reasoner.getRange(propertyToDescribe); + if(existingRange != null){ + existingAxioms.add(new ObjectPropertyRangeAxiom(propertyToDescribe, existingRange)); if(reasoner.isPrepared()){ - if(reasoner.getClassHierarchy().contains(existingDomain)){ - for(Description sup : reasoner.getClassHierarchy().getSuperClasses(existingDomain)){ 
- existingAxioms.add(new ObjectPropertyRangeAxiom(propertyToDescribe, existingDomain)); + if(reasoner.getClassHierarchy().contains(existingRange)){ + for(Description sup : reasoner.getClassHierarchy().getSuperClasses(existingRange)){ + existingAxioms.add(new ObjectPropertyRangeAxiom(propertyToDescribe, existingRange)); logger.info("Existing range(inferred): " + sup); } } @@ -138,7 +138,7 @@ } private int addIndividualsWithTypes(Map<Individual, SortedSet<Description>> ind2Types, int limit, int offset){ - String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?s <%s> ?ind. ?ind a ?type.} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); + String query = String.format("PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT DISTINCT ?ind ?type WHERE {?s <%s> ?ind. ?ind a ?type. ?type a owl:Class} LIMIT %d OFFSET %d", propertyToDescribe.getName(), limit, offset); // String query = String.format("SELECT DISTINCT ?ind ?type WHERE {?ind a ?type. {SELECT ?ind {?ind <%s> ?o.} LIMIT %d OFFSET %d}}", propertyToDescribe.getName(), limit, offset); @@ -179,9 +179,9 @@ ObjectPropertyRangeAxiomLearner l = new ObjectPropertyRangeAxiomLearner(ks); l.setReasoner(reasoner); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/author")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/routeTypeAbbreviation")); l.setMaxExecutionTimeInSeconds(10); - l.setReturnOnlyNewAxioms(true); +// l.setReturnOnlyNewAxioms(true); l.init(); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubDataPropertyOfAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -125,7 
+125,7 @@ //get subjects with types int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + + String queryTemplate = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o.?p a owl:DatatypeProperty." + "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/SubObjectPropertyOfAxiomLearner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -125,7 +125,7 @@ //get subjects with types int limit = 1000; int offset = 0; - String queryTemplate = "SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o." + + String queryTemplate = "PREFIX owl: <http://www.w3.org/2002/07/owl#> SELECT ?p COUNT(?s) AS ?count WHERE {?s ?p ?o.?p a owl:ObjectProperty." 
+ "{SELECT ?s ?o WHERE {?s <%s> ?o.} LIMIT %d OFFSET %d}" + "}"; String query; @@ -135,7 +135,7 @@ boolean repeat = true; while(!terminationCriteriaSatisfied() && repeat){ - query = String.format(queryTemplate, propertyToDescribe, limit, offset); + query = String.format(queryTemplate, propertyToDescribe, limit, offset);System.out.println(query); ResultSet rs = executeSelectQuery(query); QuerySolution qs; repeat = false; @@ -177,7 +177,7 @@ } public static void main(String[] args) throws Exception{ - SubObjectPropertyOfAxiomLearner l = new SubObjectPropertyOfAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveOpenLink())); + SubObjectPropertyOfAxiomLearner l = new SubObjectPropertyOfAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia())); l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/writer")); l.setMaxExecutionTimeInSeconds(10); l.init(); Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -227,7 +227,7 @@ } protected Model executeConstructQuery(String query) { - logger.info("Sending query\n{} ...", query); + logger.debug("Sending query\n{} ...", query); if(ks.isRemote()){ SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), @@ -241,7 +241,7 @@ return model; } catch (QueryExceptionHTTP e) { if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout", e); + logger.warn("Got timeout"); } else { logger.error("Exception executing query", e); } @@ -254,7 +254,7 @@ } protected ResultSet executeSelectQuery(String query) { - 
logger.info("Sending query\n{} ...", query); + logger.debug("Sending query\n{} ...", query); if(ks.isRemote()){ SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), @@ -268,7 +268,7 @@ return rs; } catch (QueryExceptionHTTP e) { if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout", e); + logger.warn("Got timeout"); } else { logger.error("Exception executing query", e); } @@ -280,7 +280,7 @@ } protected ResultSet executeSelectQuery(String query, Model model) { - logger.info("Sending query on local model\n{} ...", query); + logger.debug("Sending query on local model\n{} ...", query); QueryExecution qexec = QueryExecutionFactory.create(query, model); ResultSet rs = qexec.execSelect();; @@ -366,6 +366,15 @@ return new AxiomScore(accuracy, confidence); } + protected double accuracy(int total, int success){ + double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(total, success); + return (confidenceInterval[0] + confidenceInterval[1]) / 2; + } + + protected double fMEasure(double precision, double recall){ + return 2 * precision * recall / (precision + recall); + } + class OWLFilter extends Filter<OntClass>{ @Override Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-03 14:31:56 UTC (rev 3682) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2012-05-03 14:42:31 UTC (rev 3683) @@ -94,6 +94,8 @@ private OntModel model; private Map<NamedClass, Integer> classPopularityMap; + private Map<ObjectProperty, Integer> objectPropertyPopularityMap; + private Map<DatatypeProperty, Integer> dataPropertyPopularityMap; public SPARQLReasoner(SparqlEndpointKS ks) { @@ -113,6 +115,12 @@ this.model = model; } + public void 
precomputePopularity(){ + precomputeClassPopularity(); + precomputeDataPropertyPopularity(); + precomputeObjectPropertyPopularity(); + } + public void precomputeClassPopularity(){ logger.info("Precomputing class popularity ..."); classPopularityMap = new HashMap<NamedClass, Integer>(); @@ -128,6 +136,36 @@ } } + public void precomputeObjectPropertyPopularity(){ + logger.info("Precomputing object property popularity ..."); + objectPropertyPopularityMap = new HashMap<ObjectProperty, Integer>(); + + Set<ObjectProperty> properties = new SPARQLTasks(ks.getEndpoint()).getAllObjectProperties(); + String queryTemplate = "SELECT (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o}"; + + ResultSet rs; + for(ObjectProperty op : properties){ + rs = executeSelectQuery(String.format(queryTemplate, op.getName())); + int cnt = rs.next().getLiteral("cnt").getInt(); + objectPropertyPopularityMap.put(op, cnt); + } + } + + public void precomputeDataPropertyPopularity(){ + logger.info("Precomputing data property popularity ..."); + dataPropertyPopularityMap = new HashMap<DatatypeProperty, Integer>(); + + Set<DatatypeProperty> properties = new SPARQLTasks(ks.getEndpoint()).getAllDataProperties(); + String queryTemplate = "SELECT (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o}"; + + ResultSet rs; + for(DatatypeProperty dp : properties){ + rs = executeSelectQuery(String.format(queryTemplate, dp.getName())); + int cnt = rs.next().getLiteral("cnt").getInt(); + dataPropertyPopularityMap.put(dp, cnt); + } + } + public int getPopularity(NamedClass nc){ if(classPopularityMap.containsKey(nc)){ return classPopularityMap.get(nc); @@ -143,6 +181,36 @@ } + public int getPopularity(ObjectProperty op){ + if(objectPropertyPopularityMap.containsKey(op)){ + return objectPropertyPopularityMap.get(op); + } else { + System.out.println("Cache miss: " + op); + String queryTemplate = "SELECT (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o}"; + + ResultSet rs = executeSelectQuery(String.format(queryTemplate, op.getName())); + int cnt = 
rs.next().getLiteral("cnt").getInt(); + objectPropertyPopularityMap.put(op, cnt); + return cnt; + } + + } + + public int getPopularity(DatatypeProperty dp){ + if(dataPropertyPopularityMap.containsKey(dp)){ + return dataPropertyPopularityMap.get(dp); + } else { + System.out.println("Cache miss: " + dp); + String queryTemplate = "SELECT (COUNT(*) AS ?cnt) WHERE {?s <%s> ?o}"; + + ResultSet rs = executeSelectQuery(String.format(queryTemplate, dp.getName())); + int cnt = rs.next().getLiteral("cnt").getInt(); + dataPropertyPopularityMap.put(dp, cnt); + return cnt; + } + + } + public final ClassHierarchy prepareSubsumptionHierarchy() { logger.info("Preparing subsumption hierarchy ..."); long startTime = System.currentTimeMillis(); @@ -252,7 +320,7 @@ propertyCharacteristics.add(OWL2.AsymmetricProperty); for(Resource propChar : propertyCharacteristics){ - query = "CONSTRUCT {?s a <%s>} WHERE {?s a <%s>}".replaceAll("%s", propChar.getURI()); + query = "CONSTRUCT {?s a <%s>. ?s a <http://www.w3.org/2002/07/owl#ObjectProperty>} WHERE {?s a <%s>.}".replaceAll("%s", propChar.getURI()); model.add(loadIncrementally(query)); } //for functional properties we have to distinguish between data and object properties, @@ -914,7 +982,7 @@ } private ResultSet executeSelectQuery(String query){ - logger.info("Sending query \n {}", query); + logger.debug("Sending query \n {}", query); ResultSet rs = null; if(ks.isRemote()){ if(useCache){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |