From: <ku...@us...> - 2008-01-23 02:24:10
Revision: 411
          http://dl-learner.svn.sourceforge.net/dl-learner/?rev=411&view=rev
Author:   kurzum
Date:     2008-01-22 18:23:59 -0800 (Tue, 22 Jan 2008)

Log Message:
-----------
added roleLearningProblem, commented out lots of code that can be saved using Jena, and wrote more comments

Modified Paths:
--------------
    trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryInterface.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/PropertyNode.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java

Added Paths:
-----------
    trunk/src/dl-learner/org/dllearner/learningproblems/RoleLearning.java

Removed Paths:
-------------
    trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java

Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java
===================================================================
--- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java	2008-01-22 16:47:20 UTC (rev 410)
+++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java	2008-01-23 02:23:59 UTC (rev 411)
@@ -29,7 +29,7 @@
 import org.dllearner.kb.sparql.datastructure.Node;
 
 /**
- * This class is used to extract the information recursively.
+ * This class is used to extract the information .
* * @author Sebastian Hellmann */ @@ -38,23 +38,23 @@ private Configuration configuration; private Manipulator manipulator; private int recursionDepth = 1; - //private boolean getAllSuperClasses = true; - //private boolean closeAfterRecursion = true; - private boolean print_flag=false; + // private boolean getAllSuperClasses = true; + // private boolean closeAfterRecursion = true; + private boolean print_flag = false; public ExtractionAlgorithm(Configuration Configuration) { - this.configuration = Configuration; + // this.configuration = Configuration; this.manipulator = Configuration.getManipulator(); this.recursionDepth = Configuration.getRecursiondepth(); - //this.getAllSuperClasses = Configuration.isGetAllSuperClasses(); - //this.closeAfterRecursion=Configuration.isCloseAfterRecursion(); + // this.getAllSuperClasses = Configuration.isGetAllSuperClasses(); + // this.closeAfterRecursion=Configuration.isCloseAfterRecursion(); } public Node getFirstNode(URI u) { return new InstanceNode(u); } - public Vector<Node> expandAll(URI[] u, TypedSparqlQueryInterface tsp) { + public Vector<Node> expandAll(URI[] u, TypedSparqlQuery tsp) { Vector<Node> v = new Vector<Node>(); for (URI one : u) { v.add(expandNode(one, tsp)); @@ -62,14 +62,17 @@ return v; } - /*most important function - expands one example - cave: the recursion is not a recursion anymore, - it was transformed to an iteration - */ - public Node expandNode(URI u, TypedSparqlQueryInterface tsp) { - long time=System.currentTimeMillis(); - Node n = getFirstNode(u); + /** + * most important function expands one example cave: the recursion is not a + * recursion anymore, it was transformed to an iteration + * + * @param uri + * @param typedSparqlQuery + * @return + */ + public Node expandNode(URI uri, TypedSparqlQuery typedSparqlQuery) { + long time = System.currentTimeMillis(); + Node n = getFirstNode(uri); Vector<Node> v = new Vector<Node>(); v.add(n); p("StartVector: " + v); @@ -82,26 +85,27 @@ Node tmpNode = v.remove(0); p("Expanding " + tmpNode); // System.out.println(this.Manipulator); - + // these are the new not expanded nodes // the others are saved in connection with the original node - Vector<Node> tmpVec = tmpNode.expand(tsp, manipulator); + Vector<Node> tmpVec = tmpNode.expand(typedSparqlQuery, + manipulator); tmp.addAll(tmpVec); } v = tmp; - System.out.println("Recursion counter: " + x + - " with " + v.size() + " Nodes remaining, needed: " - +(System.currentTimeMillis()-time)+"ms"); - time=System.currentTimeMillis(); + System.out.println("Recursion counter: " + x + " with " + v.size() + + " Nodes remaining, needed: " + + (System.currentTimeMillis() - time) + "ms"); + time = System.currentTimeMillis(); } - - HashSet<String> hadAlready=new HashSet<String>(); + + HashSet<String> hadAlready = new HashSet<String>(); // gets All Class Nodes and expands them further if (this.configuration.isGetAllSuperClasses()) { - //Set<Node> classes = new TreeSet<Node>(); + // Set<Node> classes = new TreeSet<Node>(); Vector<Node> classes = new Vector<Node>(); - + Vector<Node> instances = new Vector<Node>(); for (Node one : v) { if (one instanceof ClassNode) { @@ -110,54 +114,62 @@ if (one instanceof InstanceNode) { instances.add(one); } - + } - //System.out.println(instances.size()); - TypedSparqlQueryClasses tsqc=new TypedSparqlQueryClasses(configuration); - if(this.configuration.isCloseAfterRecursion()){ + // System.out.println(instances.size()); + TypedSparqlQueryClasses tsqc = new TypedSparqlQueryClasses( + configuration); + if 
(this.configuration.isCloseAfterRecursion()) { while (instances.size() > 0) { - p("Getting classes for remaining instances: " + instances.size()); + p("Getting classes for remaining instances: " + + instances.size()); Node next = instances.remove(0); p("Getting classes for: " + next); classes.addAll(next.expand(tsqc, manipulator)); - if (classes.size()>=manipulator.breakSuperClassRetrievalAfter){break;} + if (classes.size() >= manipulator.breakSuperClassRetrievalAfter) { + break; + } } } - Vector<Node>tmp=new Vector<Node>(); - int i=0; + Vector<Node> tmp = new Vector<Node>(); + int i = 0; while (classes.size() > 0) { p("Remaining classes: " + classes.size()); - //Iterator<Node> it=classes.iterator(); - //Node next =(Node) it.next(); - //classes.remove(next); + // Iterator<Node> it=classes.iterator(); + // Node next =(Node) it.next(); + // classes.remove(next); Node next = classes.remove(0); - - if(!hadAlready.contains(next.getURI().toString())){ + + if (!hadAlready.contains(next.getURI().toString())) { p("Expanding: " + next); - //System.out.println(hadAlready.size()); + // System.out.println(hadAlready.size()); hadAlready.add(next.getURI().toString()); - tmp=next.expand(tsp, manipulator); + tmp = next.expand(typedSparqlQuery, manipulator); classes.addAll(tmp); - tmp=new Vector<Node>(); - //if(i % 50==0)System.out.println("got "+i+" extra classes, max: "+manipulator.breakSuperClassRetrievalAfter); + tmp = new Vector<Node>(); + // if(i % 50==0)System.out.println("got "+i+" extra classes, + // max: "+manipulator.breakSuperClassRetrievalAfter); i++; - if (i>=manipulator.breakSuperClassRetrievalAfter){break;} + if (i >= manipulator.breakSuperClassRetrievalAfter) { + break; + } } - //System.out.println("Skipping"); - - - //if (classes.size()>=manipulator.breakSuperClassRetrievalAfter){break;} - + // System.out.println("Skipping"); + + // if + // (classes.size()>=manipulator.breakSuperClassRetrievalAfter){break;} + } - //System.out.println((System.currentTimeMillis()-time)+""); + // System.out.println((System.currentTimeMillis()-time)+""); } return n; } - - void p(String s){ - if(print_flag)System.out.println(s); + + void p(String s) { + if (print_flag) + System.out.println(s); } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-23 02:23:59 UTC (rev 411) @@ -20,7 +20,6 @@ package org.dllearner.kb.sparql; import java.net.URI; -import java.util.HashSet; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -29,13 +28,12 @@ import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.configuration.SparqlQueryType; import org.dllearner.kb.sparql.datastructure.Node; -import org.dllearner.utilities.StringTuple; /** * An object of this class encapsulates everything. 
* * @author Sebastian Hellmann - * + * */ public class Manager { @@ -44,53 +42,23 @@ private ExtractionAlgorithm extractionAlgorithm; public void useConfiguration(SparqlQueryType SparqlQueryType, - SparqlEndpoint SparqlEndpoint, Manipulator manipulator, int recursiondepth, - boolean getAllSuperClasses,boolean closeAfterRecursion) { + SparqlEndpoint SparqlEndpoint, Manipulator manipulator, + int recursiondepth, boolean getAllSuperClasses, + boolean closeAfterRecursion) { - this.configuration = new Configuration(SparqlEndpoint, SparqlQueryType, manipulator, - recursiondepth, getAllSuperClasses, closeAfterRecursion); + this.configuration = new Configuration(SparqlEndpoint, SparqlQueryType, + manipulator, recursiondepth, getAllSuperClasses, + closeAfterRecursion); this.typedSparqlQuery = new TypedSparqlQuery(configuration); this.extractionAlgorithm = new ExtractionAlgorithm(configuration); - - } - public Set<String> getDomainInstancesForRole(String role) { - URI u = null; - try { - u = new URI(role); - } catch (Exception e) { - e.printStackTrace(); - } - Set<StringTuple> t = ((TypedSparqlQuery)this.typedSparqlQuery).getTupelsForRole(u, true); - Set<String> ret = new HashSet<String>(); - for (StringTuple one : t) { - - ret.add(one.a); - } - return ret; } - public Set<String> getRangeInstancesForRole(String role) { - URI u = null; - try { - u = new URI(role); - } catch (Exception e) { - e.printStackTrace(); - } - Set<StringTuple> t = ((TypedSparqlQuery)this.typedSparqlQuery).getTupelsForRole(u,false); - Set<String> ret = new HashSet<String>(); - for (StringTuple one : t) { - - ret.add(one.b); - } - return ret; - } - public String extract(URI uri) { // this.TypedSparqlQuery.query(uri); // System.out.println(ExtractionAlgorithm.getFirstNode(uri)); System.out.println("Start extracting"); - + Node n = extractionAlgorithm.expandNode(uri, typedSparqlQuery); Set<String> s = n.toNTriple(); String nt = ""; @@ -108,7 +76,8 @@ for (String one : instances) { try { - Node n = extractionAlgorithm.expandNode(new URI(one), typedSparqlQuery); + Node n = extractionAlgorithm.expandNode(new URI(one), + typedSparqlQuery); ret.addAll(n.toNTriple()); } catch (Exception e) { e.printStackTrace(); @@ -116,24 +85,18 @@ } System.out.println("Finished extracting, start conversion"); StringBuffer nt = new StringBuffer(); - Object[] arr=ret.toArray(); + Object[] arr = ret.toArray(); for (int i = 0; i < arr.length; i++) { - nt.append((String) arr[i]+"\n"); - if(i%1000==0)System.out.println(i+" of "+arr.length+" triples done"); + nt.append((String) arr[i] + "\n"); + if (i % 1000 == 0) + System.out.println(i + " of " + arr.length + " triples done"); } - System.out.println(arr.length+" of "+arr.length+" triples done"); + System.out.println(arr.length + " of " + arr.length + " triples done"); /* - String tmp=""; - while ( ret.size() > 0) { - tmp=ret.first(); - nt+=tmp; - ret.remove(tmp); - System.out.println(ret.size()); - - } - /*for (String str : ret) { - nt += str + "\n"; - }*/ + * String tmp=""; while ( ret.size() > 0) { tmp=ret.first(); nt+=tmp; + * ret.remove(tmp); System.out.println(ret.size()); } /*for (String str : + * ret) { nt += str + "\n"; } + */ return nt.toString(); } @@ -141,57 +104,43 @@ this.configuration.getSparqlQueryType().addPredicateFilter(str); } - - public Configuration getConfiguration(){ + + public Configuration getConfiguration() { return configuration; } - - /*public void calculateSubjects(String label, int limit) { - System.out.println("SparqlModul: Collecting Subjects"); - oldSparqlOntologyCollector 
oc = new oldSparqlOntologyCollector(url); - try { - subjects = oc.getSubjectsFromLabel(label, limit); - } catch (IOException e) { - subjects = new String[1]; - subjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - /** - * TODO SparqlOntologyCollector needs to be removed - * @param subject + /* + * public void calculateSubjects(String label, int limit) { + * System.out.println("SparqlModul: Collecting Subjects"); + * oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); try { + * subjects = oc.getSubjectsFromLabel(label, limit); } catch (IOException e) { + * subjects = new String[1]; subjects[0] = "[Error]Sparql Endpoint could not + * be reached."; } System.out.println("SparqlModul: ****Finished"); } + * + * /** TODO SparqlOntologyCollector needs to be removed @param subject */ /* - public void calculateTriples(String subject) { - System.out.println("SparqlModul: Collecting Triples"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - triples = oc.collectTriples(subject); - } catch (IOException e) { - triples = new String[1]; - triples[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } -*/ + * public void calculateTriples(String subject) { + * System.out.println("SparqlModul: Collecting Triples"); + * oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); try { + * triples = oc.collectTriples(subject); } catch (IOException e) { triples = + * new String[1]; triples[0] = "[Error]Sparql Endpoint could not be + * reached."; } System.out.println("SparqlModul: ****Finished"); } + */ /** * TODO SparqlOntologyCollector needs to be removed + * * @param concept */ - - /*public void calculateConceptSubjects(String concept) { - System.out.println("SparqlModul: Collecting Subjects"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - conceptSubjects = oc.getSubjectsFromConcept(concept); - } catch (IOException e) { - conceptSubjects = new String[1]; - conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - */ - + /* + * public void calculateConceptSubjects(String concept) { + * System.out.println("SparqlModul: Collecting Subjects"); + * oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); try { + * conceptSubjects = oc.getSubjectsFromConcept(concept); } catch + * (IOException e) { conceptSubjects = new String[1]; conceptSubjects[0] = + * "[Error]Sparql Endpoint could not be reached."; } + * System.out.println("SparqlModul: ****Finished"); } + */ + } \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2008-01-23 02:23:59 UTC (rev 411) @@ -33,7 +33,7 @@ * Used to manipulate retrieved tupels, identify blanknodes, etc. 
* * @author Sebastian Hellmann - * + * */ public class Manipulator { public final String subclass = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; @@ -42,37 +42,47 @@ final String classns = "http://www.w3.org/2002/07/owl#Class"; final String thing = "http://www.w3.org/2002/07/owl#Thing"; - public String blankNodeIdentifier = "bnode"; - public int breakSuperClassRetrievalAfter=200; + public int breakSuperClassRetrievalAfter = 200; public LinkedList<StringTuple> replacePredicate; public LinkedList<StringTuple> replaceObject; - - Set<String> classproperties; + // Set<String> classproperties; - public Manipulator(String blankNodeIdentifier,int breakSuperClassRetrievalAfter,LinkedList<StringTuple> replacePredicate,LinkedList<StringTuple> replaceObject) { + public Manipulator(String blankNodeIdentifier, + int breakSuperClassRetrievalAfter, + LinkedList<StringTuple> replacePredicate, + LinkedList<StringTuple> replaceObject) { this.blankNodeIdentifier = blankNodeIdentifier; - this.replaceObject=replaceObject; - this.replacePredicate=replacePredicate; - this.breakSuperClassRetrievalAfter=breakSuperClassRetrievalAfter; - Set<String> classproperties = new HashSet<String>(); - classproperties.add(subclass); + this.replaceObject = replaceObject; + this.replacePredicate = replacePredicate; + this.breakSuperClassRetrievalAfter = breakSuperClassRetrievalAfter; + // Set<String> classproperties = new HashSet<String>(); + // classproperties.add(subclass); } - // TODO user defined rules missing - public Set<StringTuple> check(Set<StringTuple> s, Node node) { + /** + * this checks for consistency and manipulates the tuples, before they get + * triple + * + * @param tuples + * tuples for the node + * @param node + * @return + */ + public Set<StringTuple> check(Set<StringTuple> tuples, Node node) { Set<StringTuple> toRemove = new HashSet<StringTuple>(); - Iterator<StringTuple> it = s.iterator(); + Iterator<StringTuple> it = tuples.iterator(); while (it.hasNext()) { StringTuple t = (StringTuple) it.next(); replacePredicate(t); replaceObject(t); - // remove <rdf:type, owl:class> + // remove <rdf:type, owl:class> // this is done to avoid transformation to owl:subclassof - if (t.a.equals(type) && t.b.equals(classns) && node instanceof ClassNode) { + if (t.a.equals(type) && t.b.equals(classns) + && node instanceof ClassNode) { toRemove.add(t); } @@ -82,27 +92,29 @@ } // remove all instances with owl:type thing - if (t.a.equals(type) && t.b.equals(thing) && node instanceof InstanceNode) { + if (t.a.equals(type) && t.b.equals(thing) + && node instanceof InstanceNode) { toRemove.add(t); } } - s.removeAll(toRemove); + tuples.removeAll(toRemove); - return s; + return tuples; } - - private void replacePredicate(StringTuple t){ - for(StringTuple rep:replacePredicate){ - if(rep.a.equals(t.a)){ - t.a=rep.b; + + private void replacePredicate(StringTuple t) { + for (StringTuple rep : replacePredicate) { + if (rep.a.equals(t.a)) { + t.a = rep.b; } } } - private void replaceObject(StringTuple t){ - for(StringTuple rep:replaceObject){ - if(rep.a.equals(t.a)){ - t.a=rep.b; + + private void replaceObject(StringTuple t) { + for (StringTuple rep : replaceObject) { + if (rep.a.equals(t.a)) { + t.a = rep.b; } } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-22 16:47:20 UTC (rev 410) +++ 
trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-23 02:23:59 UTC (rev 411) @@ -57,7 +57,7 @@ import org.dllearner.utilities.StringTuple; /** - * Represents a SPARQL Endpoint. + * Represents the SPARQL Endpoint Component. * * @author Jens Lehmann * @author Sebastian Knappe @@ -65,8 +65,8 @@ */ public class SparqlKnowledgeSource extends KnowledgeSource { - private Map<Integer,SparqlQuery> queryIDs = new HashMap<Integer,SparqlQuery>(); - private Map<Integer, String[][]> queryResult=new HashMap<Integer,String[][]>(); + private Map<Integer, SparqlQuery> queryIDs = new HashMap<Integer, SparqlQuery>(); + private Map<Integer, String[][]> queryResult = new HashMap<Integer, String[][]>(); // ConfigOptions public URL url; // String host; @@ -84,19 +84,19 @@ private boolean getAllSuperClasses = true; private boolean closeAfterRecursion = true; private int breakSuperClassRetrievalAfter = 200; - - private boolean learnDomain = false; - private boolean learnRange = false; - private int numberOfInstancesUsedForRoleLearning = 40; - private String role = ""; private String blankNodeIdentifier = "bnode"; -// private String verbosity = "warning"; - - //LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); + // private boolean learnDomain = false; + // private boolean learnRange = false; + // private int numberOfInstancesUsedForRoleLearning = 40; + // private String role = ""; + // + // private String verbosity = "warning"; + + // LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); - SparqlEndpoint sse = null; + SparqlEndpoint endpoint = null; /** * Holds the results of the calculateSubjects method @@ -130,10 +130,10 @@ private Thread triplesThread; private Thread conceptThread; - - private LinkedList<String> defaultGraphURIs=new LinkedList<String>(); - private LinkedList<String> namedGraphURIs=new LinkedList<String>(); + private LinkedList<String> defaultGraphURIs = new LinkedList<String>(); + private LinkedList<String> namedGraphURIs = new LinkedList<String>(); + // received ontology as array, used if format=Array(an element of the // array consists of the subject, predicate and object separated by '<' private String[] ontArray; @@ -145,8 +145,9 @@ return "SPARQL Endpoint"; } - private static Logger logger = Logger.getLogger(SparqlKnowledgeSource.class); - + private static Logger logger = Logger + .getLogger(SparqlKnowledgeSource.class); + /** * sets the ConfigOptions for this KnowledgeSource * @@ -155,7 +156,8 @@ public static Collection<ConfigOption<?>> createConfigOptions() { Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); -// options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); + // options.add(new StringConfigOption("host", "host of SPARQL + // Endpoint")); options .add(new StringSetConfigOption("instances", "relevant instances e.g. 
positive and negative examples in a learning problem")); @@ -208,9 +210,11 @@ options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); options.add(CommonConfigOptions.getVerbosityOption()); - - options.add(new StringSetConfigOption("defaultGraphURIs","a list of all default Graph URIs")); - options.add(new StringSetConfigOption("namedGraphURIs","a list of all named Graph URIs")); + + options.add(new StringSetConfigOption("defaultGraphURIs", + "a list of all default Graph URIs")); + options.add(new StringSetConfigOption("namedGraphURIs", + "a list of all named Graph URIs")); return options; } @@ -230,8 +234,8 @@ throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), "malformed URL " + s); } -// } else if (option.equals("host")) { -// host = (String) entry.getValue(); + // } else if (option.equals("host")) { + // host = (String) entry.getValue(); } else if (option.equals("instances")) { instances = (Set<String>) entry.getValue(); } else if (option.equals("recursionDepth")) { @@ -254,12 +258,16 @@ useLits = (Boolean) entry.getValue(); } else if (option.equals("getAllSuperClasses")) { getAllSuperClasses = (Boolean) entry.getValue(); - } else if (option.equals("learnDomain")) { - learnDomain = (Boolean) entry.getValue(); - } else if (option.equals("learnRange")) { - learnRange = (Boolean) entry.getValue(); - } else if (option.equals("role")) { - role = (String) entry.getValue(); + /* + * TODO remaove } else if (option.equals("learnDomain")) { + * learnDomain = (Boolean) entry.getValue(); } else if + * (option.equals("learnRange")) { learnRange = (Boolean) + * entry.getValue(); } else if (option.equals("role")) { role = + * (String) entry.getValue(); } else if + * (option.equals("numberOfInstancesUsedForRoleLearning")) { + * numberOfInstancesUsedForRoleLearning = (Integer) + * entry.getValue(); + */ } else if (option.equals("blankNodeIdentifier")) { blankNodeIdentifier = (String) entry.getValue(); } else if (option.equals("example")) { @@ -270,23 +278,21 @@ replaceObject = (LinkedList) entry.getValue(); } else if (option.equals("breakSuperClassRetrievalAfter")) { breakSuperClassRetrievalAfter = (Integer) entry.getValue(); - } else if (option.equals("numberOfInstancesUsedForRoleLearning")) { - numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); } else if (option.equals("closeAfterRecursion")) { closeAfterRecursion = (Boolean) entry.getValue(); -// } else if (option.equals("verbosity")) { -// verbosity = (String) entry.getValue(); + // } else if (option.equals("verbosity")) { + // verbosity = (String) entry.getValue(); } else if (option.equals("defaultGraphURIs")) { Set<String> temp = (Set<String>) entry.getValue(); - Iterator iter=temp.iterator(); - while (iter.hasNext()){ - defaultGraphURIs.add((String)iter.next()); + Iterator iter = temp.iterator(); + while (iter.hasNext()) { + defaultGraphURIs.add((String) iter.next()); } } else if (option.equals("namedGraphURIs")) { Set<String> temp = (Set<String>) entry.getValue(); - Iterator iter=temp.iterator(); - while (iter.hasNext()){ - namedGraphURIs.add((String)iter.next()); + Iterator iter = temp.iterator(); + while (iter.hasNext()) { + namedGraphURIs.add((String) iter.next()); } } } @@ -299,100 +305,50 @@ @Override public void init() { logger.info("SparqlModul: Collecting Ontology"); - // SparqlOntologyCollector oc= - // new SparqlOntologyCollector(Datastructures.setToArray(instances), - // numberOfRecursions, filterMode, - // 
Datastructures.setToArray(predList),Datastructures.setToArray( - // objList),Datastructures.setToArray(classList),format,url,useLits); - + /* + * TODO remove when Jena works SparqlOntologyCollector oc= // new + * SparqlOntologyCollector(Datastructures.setToArray(instances), // + * numberOfRecursions, filterMode, // + * Datastructures.setToArray(predList),Datastructures.setToArray( + * objList),Datastructures.setToArray(classList),format,url,useLits); + * //HashMap<String, String> parameters = new HashMap<String, + * String>(); //parameters.put("default-graph-uri", + * "http://dbpedia.org"); //parameters.put("format", + * "application/sparql-results.xml"); + * + */ + Manager m = new Manager(); - SparqlQueryType sqt = null; + SparqlQueryType sparqlQueryType = null; // get Options for Manipulator - Manipulator man = new Manipulator(blankNodeIdentifier, + Manipulator manipulator = new Manipulator(blankNodeIdentifier, breakSuperClassRetrievalAfter, replacePredicate, replaceObject); - //HashMap<String, String> parameters = new HashMap<String, String>(); - //parameters.put("default-graph-uri", "http://dbpedia.org"); - //parameters.put("format", "application/sparql-results.xml"); // get Options for endpoints if (predefinedEndpoint >= 1) { - sse = SparqlEndpoint.getEndpointByNumber(predefinedEndpoint); + endpoint = SparqlEndpoint.getEndpointByNumber(predefinedEndpoint); } else { // TODO this is not optimal, because not all options are used - sse = new SparqlEndpoint(url); + // like default-graph uri + endpoint = new SparqlEndpoint(url); } // get Options for Filters if (predefinedFilter >= 1) { - sqt = SparqlQueryType.getFilterByNumber(predefinedFilter); + sparqlQueryType = SparqlQueryType + .getFilterByNumber(predefinedFilter); } else { - sqt = new SparqlQueryType("forbid", objList, predList, useLits ); + sparqlQueryType = new SparqlQueryType("forbid", objList, predList, + useLits); } // give everything to the manager - m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses, - closeAfterRecursion); + m.useConfiguration(sparqlQueryType, endpoint, manipulator, + recursionDepth, getAllSuperClasses, closeAfterRecursion); try { String ont = ""; - // System.out.println(learnDomain); - // used to learn a domain of a role - if (learnDomain || learnRange) { - Set<String> pos = new HashSet<String>(); - Set<String> neg = new HashSet<String>(); - if (learnDomain) { - pos = m.getDomainInstancesForRole(role); - neg = m.getRangeInstancesForRole(role); - } else if (learnRange) { - neg = m.getDomainInstancesForRole(role); - pos = m.getRangeInstancesForRole(role); - } - // choose 30 - - Set<String> tmp = new HashSet<String>(); - for (String one : pos) { - tmp.add(one); - if (tmp.size() >= numberOfInstancesUsedForRoleLearning) - break; - } - pos = tmp; - logger.info("Instances used: " + pos.size()); - - tmp = new HashSet<String>(); - for (String one : neg) { - tmp.add(one); - if (tmp.size() >= numberOfInstancesUsedForRoleLearning) - break; - } - neg = tmp; - - instances = new HashSet<String>(); - instances.addAll(pos); - - instances.addAll(neg); - - for (String one : pos) { - logger.info("+\"" + one + "\""); - } - for (String one : neg) { - logger.info("-\"" + one + "\""); - } - - /* - * Random r= new Random(); - * - * - * Object[] arr=instances.toArray(); - * while(instances.size()>=30){ } - */ - // add the role to the filter(a solution is always EXISTS - // role.TOP) - m.addPredicateFilter(role); - // System.out.println(instances); - // THIS is a workaround - - } // the actual extraction is started 
here ont = m.extract(instances); logger.info("Number of cached SPARQL queries: " @@ -479,9 +435,9 @@ logger.info("SparqlModul: Collecting Subjects"); // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); // try { - Vector<String> v = (SparqlQuery.makeLabelQuery(label, limit, sse) + Vector<String> v = (SparqlQuery.makeLabelQuery(label, limit, endpoint) .getAsVector("subject")); - subjects = (String[]) v.toArray(new String[v.size()]); + subjects = (String[]) v.toArray(new String[v.size()]); // subjects = oc.getSubjectsFromLabel(label, limit); // } catch (IOException e) { // TODO I removed IOException, please check @@ -498,22 +454,23 @@ */ public void calculateTriples(String subject) { logger.info("SparqlModul: Collecting Triples"); - Vector<StringTuple> v = (SparqlQuery.makeArticleQuery(subject, sse) - .getAsVectorOfTupels("predicate", "objcet")); - //String[] subjects = (String[]) v.toArray(new String[v.size()]); + Vector<StringTuple> v = (SparqlQuery + .makeArticleQuery(subject, endpoint).getAsVectorOfTupels( + "predicate", "objcet")); + // String[] subjects = (String[]) v.toArray(new String[v.size()]); String[] tmp = new String[v.size()]; - int i=0; + int i = 0; for (StringTuple stringTuple : v) { - tmp[i++]=stringTuple.a+"<"+stringTuple.b; + tmp[i++] = stringTuple.a + "<" + stringTuple.b; } - triples=tmp; - //oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - //try { - // triples = oc.collectTriples(subject); - //} catch (IOException e) { - // triples = new String[1]; - // triples[0] = "[Error]Sparql Endpoint could not be reached."; - //} + triples = tmp; + // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + // try { + // triples = oc.collectTriples(subject); + // } catch (IOException e) { + // triples = new String[1]; + // triples[0] = "[Error]Sparql Endpoint could not be reached."; + // } logger.info("SparqlModul: ****Finished"); } @@ -524,9 +481,9 @@ */ public void calculateConceptSubjects(String concept) { logger.info("SparqlModul: Collecting Subjects"); - Vector<String> v = (SparqlQuery.makeConceptQuery(concept, sse) + Vector<String> v = (SparqlQuery.makeConceptQuery(concept, endpoint) .getAsVector("subject")); - conceptSubjects = (String[]) v.toArray(new String[v.size()]); + conceptSubjects = (String[]) v.toArray(new String[v.size()]); // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); // try { @@ -600,44 +557,43 @@ } public int sparqlQuery(String query) { - this.sse=new SparqlEndpoint(url,defaultGraphURIs,namedGraphURIs); - return this.generateQueryID(new SparqlQuery(query, sse)); + this.endpoint = new SparqlEndpoint(url, defaultGraphURIs, + namedGraphURIs); + return this.generateQueryID(new SparqlQuery(query, endpoint)); } - - public void startSparqlQuery(int queryID){ + + public void startSparqlQuery(int queryID) { queryResult.put(queryID, queryIDs.get(queryID).getAsStringArray()); } - - public SparqlQuery getSparqlQuery(int queryID){ + + public SparqlQuery getSparqlQuery(int queryID) { return queryIDs.get(queryID); } - - public String[][] getSparqlResult(int queryID){ + + public String[][] getSparqlResult(int queryID) { return queryResult.get(queryID); } - + private int generateQueryID(SparqlQuery query) { int id; - Random rand=new Random(); + Random rand = new Random(); do { id = rand.nextInt(); - } while(queryIDs.keySet().contains(id)); + } while (queryIDs.keySet().contains(id)); queryIDs.put(id, query); - return id; + return id; } - - public static void main(String[] args) throws 
MalformedURLException - { - String query="SELECT ?pred ?obj\n"+ - "WHERE {<http://dbpedia.org/resource/Leipzig> ?pred ?obj}"; - URL url=new URL("http://dbpedia.openlinksw.com:8890/sparql"); - SparqlEndpoint sse=new SparqlEndpoint(url); - SparqlQuery q=new SparqlQuery(query,sse); - String[][] array=q.getAsStringArray(); - for (int i=0;i<array.length;i++) - { - for (int j=0;j<array[0].length;j++) - System.out.print(array[i][j]+" "); + + public static void main(String[] args) throws MalformedURLException { + String query = "SELECT ?pred ?obj\n" + + "WHERE {<http://dbpedia.org/resource/Leipzig> ?pred ?obj}"; + URL url = new URL("http://dbpedia.openlinksw.com:8890/sparql"); + SparqlEndpoint sse = new SparqlEndpoint(url); + SparqlQuery q = new SparqlQuery(query, sse); + String[][] array = q.getAsStringArray(); + for (int i = 0; i < array.length; i++) { + for (int j = 0; j < array[0].length; j++) + System.out.print(array[i][j] + " "); System.out.println(); } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-23 02:23:59 UTC (rev 411) @@ -23,7 +23,8 @@ /** * Can assemble sparql queries. can make queries for subject, predicate, object - * according to the filter settings object not yet implemented + * according to the filter settings object SparqlQueryType, which gives the + * predicate and object lists * * @author Sebastian Hellmann * @@ -48,6 +49,11 @@ return ret; } + /** + * + * @param role + * @return + */ public String makeRoleQueryUsingFilters(String role) { String Filter = internalFilterAssemblyRole(); @@ -109,19 +115,19 @@ return Filter; } - public String filterSubject(String ns) { + private String filterSubject(String ns) { return "&&( !regex(str(?subject), '" + ns + "') )"; } - public static String filterPredicate(String ns) { + private static String filterPredicate(String ns) { return "&&( !regex(str(?predicate), '" + ns + "') )"; } - public static String filterObject(String ns) { + private static String filterObject(String ns) { return "&&( !regex(str(?object), '" + ns + "') )"; } - public void p(String str) { + private void p(String str) { if (print_flag) { System.out.println(str); } @@ -137,28 +143,18 @@ * @return sparql query */ /* - public static String makeQueryFilter(String subject, oldSparqlFilter sf) { + * public static String makeQueryFilter(String subject, oldSparqlFilter sf) { + * + * String Filter = ""; if (!sf.useLiterals) Filter += "!isLiteral(?object)"; + * for (String p : sf.getPredFilter()) { Filter += "\n" + + * filterPredicate(p); } for (String o : sf.getObjFilter()) { Filter += "\n" + + * filterObject(o); } + * + * String ret = "SELECT * WHERE { \n" + "<" + subject + "> ?predicate + * ?object.\n"; if (!(Filter.length() == 0)) ret += "FILTER( \n" + "(" + + * Filter + "))."; ret += "}"; // System.out.println(ret); return ret; } + */ - String Filter = ""; - if (!sf.useLiterals) - Filter += "!isLiteral(?object)"; - for (String p : sf.getPredFilter()) { - Filter += "\n" + filterPredicate(p); - } - for (String o : sf.getObjFilter()) { - Filter += "\n" + filterObject(o); - } - - String ret = "SELECT * WHERE { \n" + "<" + subject - + "> ?predicate ?object.\n"; - if (!(Filter.length() == 0)) - ret += "FILTER( \n" + "(" + Filter + "))."; - ret += "}"; - // System.out.println(ret); - return ret; - } -*/ 
- /* * moved to SparqlQuery TODO remove here creates a query for subjects with * the specified label @param label a phrase that is part of the label of a Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-23 02:23:59 UTC (rev 411) @@ -25,6 +25,7 @@ import org.dllearner.kb.sparql.configuration.Configuration; import org.dllearner.kb.sparql.query.Cache; +import org.dllearner.kb.sparql.query.CachedSparqlQuery; import org.dllearner.kb.sparql.query.SparqlQuery; import org.dllearner.utilities.StringTuple; @@ -36,102 +37,98 @@ */ public class TypedSparqlQuery implements TypedSparqlQueryInterface { boolean print_flag = false; - boolean debug_no_cache = false;// true means no cahce is used - private Configuration configuration; - // private SparqlHTTPRequest SparqlHTTPRequest; + protected Configuration configuration; private SparqlQueryMaker sparqlQueryMaker; - // private SparqlQuery sparqlQuery; - //private CachedSparqlQuery cachedSparqlQuery; Cache cache; + // boolean debug_no_cache = false;// true means no cache is used + // private SparqlHTTPRequest SparqlHTTPRequest; + // private SparqlQuery sparqlQuery; + // private CachedSparqlQuery cachedSparqlQuery; + public TypedSparqlQuery(Configuration Configuration) { this.configuration = Configuration; - // this.SparqlHTTPRequest = new - // SparqlHTTPRequest(Configuration.getSparqlEndpoint()); this.sparqlQueryMaker = new SparqlQueryMaker(Configuration .getSparqlQueryType()); + this.cache = new Cache("cache"); // this.sparqlQuery=new SparqlQuery(configuration.getSparqlEndpoint()); - this.cache = new Cache("cache"); // this.cachedSparqlQuery=new // CachedSparqlQuery(this.sparqlQuery,this.cache); } // standard query get a tupels (p,o) for subject s - public Set<StringTuple> query(URI u) { + /** + * uses a cache and gets the result tuples for a resource u + * + * @param uri + * the resource + * @param sparqlQueryString + * @param a + * the name of the first bound variable for xml parsing, normally + * predicate + * @param b + * the name of the second bound variable for xml parsing, + * normally object + * @return + */ + public Set<StringTuple> getTupelForResource(URI uri) { + // TODO remove + String a = "predicate"; + String b = "object"; // getQuery - String sparql = sparqlQueryMaker.makeSubjectQueryUsingFilters(u - .toString()); - return cachedSparql(u, sparql, "predicate", "object"); + String sparqlQueryString = sparqlQueryMaker + .makeSubjectQueryUsingFilters(uri.toString()); - } + CachedSparqlQuery csq = new CachedSparqlQuery(configuration + .getSparqlEndpoint(), cache, uri.toString(), sparqlQueryString); - // query get a tupels (s,o) for role p - public Set<StringTuple> getTupelsForRole(URI u) { - - // getQuery - String sparql = sparqlQueryMaker - .makeRoleQueryUsingFilters(u.toString()); - - Set<StringTuple> s = cachedSparql(u, sparql, "subject", "object"); - // System.out.println(s); - return s; - - } - - public Set<StringTuple> getTupelsForRole(URI u, boolean domain) { - - // getQuery - String sparql = sparqlQueryMaker.makeRoleQueryUsingFilters( - u.toString(), domain); - - Set<StringTuple> s = cachedSparql(u, sparql, "subject", "object"); - // System.out.println(s); - return s; - - } - - // uses a cache - private Set<StringTuple> cachedSparql(URI u, String sparql, String a, 
- String b) { - // check cache - String FromCache = cache.get(u.toString(), sparql); - if (debug_no_cache) { - FromCache = null; - } - String xml = null; - // if not in cache get it from EndPoint - if (FromCache == null) { - configuration.increaseNumberOfuncachedSparqlQueries(); - // try { - xml = sendAndReceiveSPARQL(sparql); - /* - * } catch (IOException e) {e.printStackTrace();} - */ - p(sparql); - // System.out.println(xml); - if (!debug_no_cache) { - cache.put(u.toString(), sparql, xml); - } - // System.out.print("\n"); - } else { - configuration.increaseNumberOfCachedSparqlQueries(); - xml = FromCache; - // System.out.println("FROM CACHE"); - } - - // System.out.println(sparql); - // System.out.println(xml); - // process XML + String xml = csq.getAsXMLString(); + // TODO needs to be changed to new format Set<StringTuple> s = processResult(xml, a, b); try { // System.out.println("retrieved " + s.size() + " tupels\n"); } catch (Exception e) { } return s; + // return cachedSparql(u, sparql, "predicate", "object"); } + @Deprecated + private Set<StringTuple> cachedSparql(URI uri, String sparqlQueryString, + String a, String b) { + return null; + /* + * OLD CODE FOLLOWING keep until Jena is working String FromCache = + * cache.get(u.toString(), sparqlQueryString); if (debug_no_cache) { + * //FromCache = null; } String xml = null; // if not in cache get it + * from EndPoint if (FromCache == null) { + * configuration.increaseNumberOfuncachedSparqlQueries(); // try { xml = + * sendAndReceiveSPARQL(sparqlQueryString); + * + * //} catch (IOException e) {e.printStackTrace();} + * + * p(sparqlQueryString); // System.out.println(xml); if + * (!debug_no_cache) { cache.put(uri.toString(), sparqlQueryString, + * xml); } // System.out.print("\n"); } else { + * configuration.increaseNumberOfCachedSparqlQueries(); xml = FromCache; // + * System.out.println("FROM CACHE"); } + */ + // System.out.println(sparql); + // System.out.println(xml); + // process XML + } + + /** + * TODO old XML processing, can be removed, once Jena is done + * + * @param xml + * @param a + * @param b + * @return a Set of Tuples <a|b> + */ + @Deprecated public Set<StringTuple> processResult(String xml, String a, String b) { Set<StringTuple> ret = new HashSet<StringTuple>(); @@ -177,7 +174,6 @@ * while (xml.indexOf(one) != -1) { * * - * * // System.out.println(new Tupel(predtmp,objtmp)); } */ @@ -185,6 +181,13 @@ } + /** + * TODO used by old XML processing, can be removed once Jena is done + * + * @param xml + * @return + */ + @Deprecated private String getNextResult(String xml) { String res1 = "<result>"; String res2 = "</result>"; @@ -196,6 +199,15 @@ return xml; } + /** + * TODO used by old XML processing, can be removed once Jena is done + * + * @param xml + * @param starttag + * @param endtag + * @return + */ + @Deprecated private String getinTag(String xml, String starttag, String endtag) { String res1 = "<" + starttag + ">"; // System.out.println(res1); @@ -210,6 +222,7 @@ return xml; } + @Deprecated public String sendAndReceiveSPARQL(String queryString) { // SparqlQuery sq=new SparqlQuery(configuration.getSparqlEndpoint()); return new SparqlQuery(queryString, configuration.getSparqlEndpoint()) Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-22 16:47:20 UTC (rev 410) +++ 
trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-23 02:23:59 UTC (rev 411) @@ -1,194 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.net.URI; -import java.util.HashSet; -import java.util.Set; - -import org.dllearner.kb.sparql.configuration.Configuration; -import org.dllearner.kb.sparql.query.Cache; -import org.dllearner.kb.sparql.query.SparqlQuery; -import org.dllearner.utilities.StringTuple; - -/** - * Can execute different queries. - * - * @author Sebastian Hellmann - * - */ -public class TypedSparqlQueryClasses implements TypedSparqlQueryInterface { - boolean print_flag = false; - boolean debug_no_cache = false; - private Configuration configuration; - // private SparqlHTTPRequest SparqlHTTPRequest; - // private SparqlQueryMaker sparqlQueryMaker; - Cache cache; - - public TypedSparqlQueryClasses(Configuration configuration) { - this.configuration = configuration; - this.cache = new Cache("cache"); - } - - // standard query get a tupels (p,o) for subject s - public Set<StringTuple> query(URI u) { - - // getQuery - String sparql = "SELECT ?predicate ?object " + "WHERE {" + "<" - + u.toString() + "> ?predicate ?object;" + "a ?object . 
" - + " FILTER (!regex(str(?object),'http://xmlns.com/foaf/0.1/'))" - + "}"; - - return cachedSparql(u, sparql, "predicate", "object"); - - } - - // uses a cache - private Set<StringTuple> cachedSparql(URI u, String sparql, String a, - String b) { - // check cache - String FromCache = cache.get(u.toString(), sparql); - if (debug_no_cache) { - FromCache = null; - } - String xml = null; - // if not in cache get it from EndPoint - if (FromCache == null) { - configuration.increaseNumberOfuncachedSparqlQueries(); - // try { - xml = sendAndReceiveSPARQL(sparql); - /* - * } catch (IOException e) {e.printStackTrace();} - */ - p(sparql); - // System.out.println(xml); - if (!debug_no_cache) { - cache.put(u.toString(), sparql, xml); - } - // System.out.print("\n"); - } else { - configuration.increaseNumberOfCachedSparqlQueries(); - xml = FromCache; - // System.out.println("FROM CACHE"); - } - - // System.out.println(sparql); - // System.out.println(xml); - // process XML - Set<StringTuple> s = processResult(xml, a, b); - try { - // System.out.println("retrieved " + s.size() + " tupels\n"); - } catch (Exception e) { - } - return s; - } - - public Set<StringTuple> processResult(String xml, String a, String b) { - - Set<StringTuple> ret = new HashSet<StringTuple>(); - // TODO if result is empty, catch exceptions - String resEnd = "</result>"; - String one = "binding name=\"" + a + "\""; - String two = "binding name=\"" + b + "\""; - String endbinding = "binding"; - String uri = "uri"; - // String uridel = "<uri>"; - String bnode = "<bnode>"; - // String uriend = "</uri>"; - String predtmp = ""; - String objtmp = ""; - // System.out.println(getNextResult(xml)); - String nextResult = ""; - while ((nextResult = getNextResult(xml)) != null) { - // System.out.println(xml.indexOf(resEnd)); - // System.out.println(xml); - if (nextResult.indexOf(bnode) != -1) { - xml = xml.substring(xml.indexOf(resEnd) + resEnd.length()); - continue; - } - // get pred - // predtmp = nextResult.substring(nextResult.indexOf(one) + - // one.length()); - predtmp = getinTag(nextResult, one, endbinding); - predtmp = getinTag(predtmp, uri, uri); - // System.out.println(predtmp); - - // getobj - objtmp = getinTag(nextResult, two, endbinding); - objtmp = getinTag(objtmp, uri, uri); - // System.out.println(objtmp); - - StringTuple st = new StringTuple(predtmp, objtmp); - // System.out.println(st); - ret.add(st); - xml = xml.substring(xml.indexOf(resEnd) + resEnd.length()); - - } - /* - * while (xml.indexOf(one) != -1) { - * - * - * - * // System.out.println(new Tupel(predtmp,objtmp)); } - */ - - return ret; - - } - - private String getNextResult(String xml) { - String res1 = "<result>"; - String res2 = "</result>"; - if (xml.indexOf(res1) == -1) - return null; - xml = xml.substring(xml.indexOf(res1) + res1.length()); - xml = xml.substring(0, xml.indexOf(res2)); - // System.out.println(xml); - return xml; - } - - private String getinTag(String xml, String starttag, String endtag) { - String res1 = "<" + starttag + ">"; - // System.out.println(res1); - String res2 = "</" + endtag + ">"; - if (xml.indexOf(res1) == -1) - return null; - xml = xml.substring(xml.indexOf(res1) + res1.length()); - // System.out.println(xml); - xml = xml.substring(0, xml.indexOf(res2)); - // System.out.println(xml); - - return xml; - } - - private String sendAndReceiveSPARQL(String sparql) { - - - return new SparqlQuery(sparql, configuration.getSparqlEndpoint()) - .getAsXMLString(); - } - - public void p(String str) { - if (print_flag) { - System.out.println(str); 
- } - } - -} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryInterface.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryInterface.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryInterface.java 2008-01-23 02:23:59 UTC (rev 411) @@ -1,3 +1,22 @@ +/** + * Copyright (C) 2007, Sebastian Hellmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ package org.dllearner.kb.sparql; import java.net.URI; @@ -7,12 +26,14 @@ /** * - * Type SPARQL query interface. + * Typed SPARQL query interface. The typing means that they all have the same + * input and the same output: They are fn: resource -> ( a | b ) where a + * normally is a predicate and b an object * * @author Sebastian Hellmann - * + * */ public interface TypedSparqlQueryInterface { - public Set<StringTuple> query(URI u); + public Set<StringTuple> getTupelForResource(URI u); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java 2008-01-23 02:23:59 UTC (rev 411) @@ -22,44 +22,42 @@ import org.dllearner.kb.sparql.Manipulator; /** - * Stores all configuration settings. + * Stores all configuration settings. 
this class collects all configuration + * information see the other classes, which are used as attributes here * * @author Sebastian Hellmann */ public class Configuration { - - /* - * this class colects all configuration information - * see the other classes, which are used as attributes here - * */ - + private SparqlEndpoint endpoint; private SparqlQueryType sparqlQueryType; private Manipulator manipulator; - // the following needs to be moved to + // the following needs to be moved to // class extraction algorithm or manipulator private int recursiondepth = 2; private boolean getAllSuperClasses = true; private boolean closeAfterRecursion = true; - public int numberOfUncachedSparqlQueries=0; - public int numberOfCachedSparqlQueries=0; + public int numberOfUncachedSparqlQueries = 0; + public int numberOfCachedSparqlQueries = 0; public Configuration(SparqlEndpoint specificSparqlEndpoint, - SparqlQueryType sparqlQueryType, Manipulator manipulator, int recursiondepth, - boolean getAllSuperClasses, boolean closeAfterRecursion) { + SparqlQueryType sparqlQueryType, Manipulator manipulator, + int recursiondepth, boolean getAllSuperClasses, + boolean closeAfterRecursion) { this.endpoint = specificSparqlEndpoint; this.sparqlQueryType = sparqlQueryType; this.manipulator = manipulator; this.recursiondepth = recursiondepth; this.getAllSuperClasses = getAllSuperClasses; - this.closeAfterRecursion=closeAfterRecursion; + this.closeAfterRecursion = closeAfterRecursion; } public Configuration changeQueryType(SparqlQueryType sqt) { // TODO must clone here return new Configuration(this.endpoint, sqt, this.manipulator, - this.recursiondepth, this.getAllSuperClasses,this.closeAfterRecursion); + this.recursiondepth, this.getAllSuperClasses, + this.closeAfterRecursion); } @@ -78,6 +76,7 @@ public boolean isGetAllSuperClasses() { return getAllSuperClasses; } + public boolean isCloseAfterRecursion() { return closeAfterRecursion; } @@ -85,11 +84,12 @@ public int getRecursiondepth() { return recursiondepth; } - - public void increaseNumberOfuncachedSparqlQueries(){ + + public void increaseNumberOfuncachedSparqlQueries() { numberOfUncachedSparqlQueries++; } - public void increaseNumberOfCachedSparqlQueries(){ + + public void increaseNumberOfCachedSparqlQueries() { numberOfCachedSparqlQueries++; } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java 2008-01-23 02:23:59 UTC (rev 411) @@ -30,7 +30,7 @@ import org.dllearner.utilities.StringTuple; /** - * Is a node in the graph that is a class. + * Is a node in the graph, that is a class. 
* * @author Sebastian Hellmann */ @@ -39,14 +39,13 @@ public ClassNode(URI u) { super(u); - // this.type = "class"; } // expands all directly connected nodes @Override public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { - Set<StringTuple> s = tsq.query(this.uri); + Set<StringTuple> s = tsq.getTupelForResource(this.uri); // see manipulator s = m.check(s, this); Vector<Node> Nodes = new Vector<Node>(); @@ -69,6 +68,7 @@ // System.out.println("XXXXX"+t.b); // if o is a blank node expand further + // TODO this needs a lot more work if (t.b.startsWith(m.blankNodeIdentifier)) { tmp.expand(tsq, m); System.out.println(m.blankNodeIdentifier); @@ -90,6 +90,11 @@ public void expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { } + /* + * (non-Javadoc) + * + * @see org.dllearner.kb.sparql.datastructure.Node#toNTriple() + */ @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java 2008-01-23 02:23:59 UTC (rev 411) @@ -51,7 +51,7 @@ @Override public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { - Set<StringTuple> s = tsq.query(uri); + Set<StringTuple> s = tsq.getTupelForResource(uri); // see Manipulator m.check(s, this); // System.out.println("fffffff"+m); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java 2008-01-22 16:47:20 UTC (rev 410) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java 2008-01-23 02:23:59 UTC (rev 411) @@ -27,25 +27,21 @@ import org.dllearner.kb.sparql.TypedSparqlQueryInterface; /** - * Abstract class. + * Abstract class. 
defines functions to expand the nodes * * @author Sebastian Hellmann * */ -/** - * @author sebastian - * - */ public abstract class Node implements Comparable<Node> { - final String subclass = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; + final String subclass = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; final String rdftype = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; final String objectProperty = "http://www.w3.org/2002/07/owl#ObjectProperty"; final String classns = "http://www.w3.org/2002/07/owl#Class"; final String thing = "http://www.w3.org/2002/07/owl#Thing"; URI uri; - //protected String type; + // protected String type; protected boolean expanded = false; public Node(URI u) { @@ -53,29 +49,30 @@ } /** - * Nodes are expanded with a certain context, given by - * the typedSparqlQuery and the manipulator + * Nodes are expanded with a certain context, given by the typedSparqlQuery + * and the manipulator + * * @param typedSparqlQuery * @param manipulator * @return Vector<Node> all Nodes that are new because of expansion */ - public abstract Vector<Node> expand(TypedSparqlQueryInterface typedSparqlQuery, - Manipulator manipulator); + public abstract Vector<Node> expand( + TypedSparqlQueryInterface typedSparqlQuery, Manipulator manipulator); - /** - * used to get type defs for properties like rdf:type SymmetricProperties + * gets type defs for properties like rdf:type SymmetricProperties * * @param typedSparqlQuery * @param manipulator - * @return Vector<Node> + * @return Vector<Node> */ public abstract void expandProperties( TypedSparqlQueryInterface typedSparqlQuery, Manipulator manipulator); /** * output - * @return a set of n-triple + * + * @return a set of n-triple */ public abstract Set<String> toNTriple(); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/PropertyNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Prope... [truncated message content] |
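The diff renames the single method of TypedSparqlQueryInterface from query(URI) to getTupelForResource(URI) and documents it as a typed function from a resource to a set of tuples (a | b), where a is normally a predicate and b an object; ClassNode.expand and InstanceNode.expand now call exactly this method. The following minimal Java sketch only illustrates that contract in isolation: the StringTuple and CannedTypedSparqlQuery classes below are simplified stand-ins written for this example, not the DL-Learner implementations (the real tuple type is org.dllearner.utilities.StringTuple, and real implementations such as TypedSparqlQuery talk to a SPARQL endpoint through a cache).

import java.net.URI;
import java.util.HashSet;
import java.util.Set;

// Hypothetical stand-in so the sketch compiles on its own; mirrors the
// two public fields a and b used throughout the diff.
class StringTuple {
    public String a; // normally the predicate
    public String b; // normally the object

    StringTuple(String a, String b) {
        this.a = a;
        this.b = b;
    }

    @Override
    public String toString() {
        return "<" + a + "|" + b + ">";
    }
}

// The renamed contract: resource -> set of (a, b) tuples.
interface TypedSparqlQueryInterface {
    Set<StringTuple> getTupelForResource(URI u);
}

// A fake implementation that returns a canned answer instead of contacting
// a SPARQL endpoint; it only shows the shape of the contract.
class CannedTypedSparqlQuery implements TypedSparqlQueryInterface {
    public Set<StringTuple> getTupelForResource(URI u) {
        Set<StringTuple> tuples = new HashSet<StringTuple>();
        tuples.add(new StringTuple(
                "http://www.w3.org/1999/02/22-rdf-syntax-ns#type",
                "http://www.w3.org/2002/07/owl#Thing"));
        return tuples;
    }
}

public class TypedSparqlQuerySketch {
    public static void main(String[] args) throws Exception {
        TypedSparqlQueryInterface tsq = new CannedTypedSparqlQuery();
        URI resource = new URI("http://dbpedia.org/resource/Leipzig");
        // a node expansion step iterates over these tuples and turns each
        // one into a neighbouring node in the extraction graph
        for (StringTuple t : tsq.getTupelForResource(resource)) {
            System.out.println(resource + " " + t);
        }
    }
}

Because every node class consumes the same tuple shape, the rename from query() to getTupelForResource() touches ClassNode, InstanceNode and the abstract Node in this commit, while the removed TypedSparqlQueryClasses becomes just another implementation of the same interface.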