From: <jen...@us...> - 2007-12-02 14:19:50
Revision: 301 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=301&view=rev Author: jenslehmann Date: 2007-12-02 06:19:48 -0800 (Sun, 02 Dec 2007) Log Message: ----------- some cleanup Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java 2007-12-02 13:31:59 UTC (rev 300) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java 2007-12-02 14:19:48 UTC (rev 301) @@ -1,105 +0,0 @@ -package org.dllearner.kb.sparql; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.net.HttpURLConnection; -import java.net.InetAddress; -import java.net.URLEncoder; -import java.util.Iterator; -import java.util.Set; - - - -public class SparqlHTTPRequest { - - - private SpecificSparqlEndpoint SparqlEndpoint; - - - - public SparqlHTTPRequest(SpecificSparqlEndpoint SparqlEndpoint){ - this.SparqlEndpoint=SparqlEndpoint; - - - } - - - public String sendAndReceiveSPARQL( String sparql){ - String ret= ""; - try{ - //System.out.println(sparql); - - //System.out.println(content); - - ret=this.sendAndReceive(sparql); - //System.out.println(ret); - - //this.sendAndReceiveSPARQL("SELECT * WHERE {?a ?b ?c} LIMIT 10"); - }catch (Exception e) {e.printStackTrace();} - return ret; - - - }//down - - - - - - - private String sendAndReceive(String sparql) throws IOException{ - StringBuilder answer = new StringBuilder(); - - // String an Sparql-Endpoint schicken - HttpURLConnection connection; - - connection = (HttpURLConnection) this.SparqlEndpoint.getURL().openConnection(); - connection.setDoOutput(true); - - connection.addRequestProperty("Host", this.SparqlEndpoint.getHost()); - connection.addRequestProperty("Connection","close"); - connection.addRequestProperty("Accept","text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); - connection.addRequestProperty("Accept-Language","de-de,de;q=0.8,en-us;q=0.5,en;q=0.3"); - connection.addRequestProperty("Accept-Charset","utf-8;q=1.0"); - connection.addRequestProperty("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8.1.4) Gecko/20070515 Firefox/2.0.0.4 Web-Sniffer/1.0.24"); - - OutputStream os = connection.getOutputStream(); - OutputStreamWriter osw = new OutputStreamWriter(os); - - Set<String> s =SparqlEndpoint.getParameters().keySet(); - Iterator<String> it=s.iterator(); - String FullURI=""; - while (it.hasNext()) { - String element = (String) it.next(); - FullURI+=""+URLEncoder.encode(element, "UTF-8")+"="+ - URLEncoder.encode(SparqlEndpoint.getParameters().get(element), "UTF-8")+"&"; - } - //System.out.println(FullURI); - FullURI+=""+SparqlEndpoint.getHasQueryParameter()+"="+URLEncoder.encode(sparql, "UTF-8"); - - - osw.write(FullURI); - osw.close(); - - // receive answer - InputStream is = connection.getInputStream(); - InputStreamReader isr = new InputStreamReader(is,"UTF-8"); - BufferedReader br = new BufferedReader(isr); - - String line; - do { - line = br.readLine(); - if(line!=null) - answer.append(line); - } while (line != null); - - br.close(); - - return answer.toString(); - } - -} - 
Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2007-12-02 13:31:59 UTC (rev 300) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2007-12-02 14:19:48 UTC (rev 301) @@ -1,18 +1,27 @@ package org.dllearner.kb.sparql; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.net.HttpURLConnection; import java.net.URI; +import java.net.URLEncoder; import java.util.HashSet; +import java.util.Iterator; import java.util.Set; public class TypedSparqlQuery { private Configuration Configuration; - private SparqlHTTPRequest SparqlHTTPRequest; + // private SparqlHTTPRequest SparqlHTTPRequest; private SparqlQueryMaker SparqlQueryMaker; Cache Cache; public TypedSparqlQuery(Configuration Configuration) { this.Configuration = Configuration; - this.SparqlHTTPRequest = new SparqlHTTPRequest(Configuration.getSparqlEndpoint()); + // this.SparqlHTTPRequest = new SparqlHTTPRequest(Configuration.getSparqlEndpoint()); this.SparqlQueryMaker = new SparqlQueryMaker(Configuration.getSparqlQueryType()); this.Cache = new Cache("cache"); } @@ -25,10 +34,15 @@ // check cache String FromCache = this.Cache.get(u.toString(), sparql); - String xml; + String xml = null; // if not in cache get it from EndPoint if (FromCache == null) { - xml = this.SparqlHTTPRequest.sendAndReceiveSPARQL(sparql); + try { + xml = sendAndReceiveSPARQL(sparql); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } //System.out.println(sparql); //System.out.println(xml); this.Cache.put(u.toString(), xml, sparql); @@ -77,4 +91,56 @@ } + private String sendAndReceiveSPARQL(String sparql) throws IOException{ + StringBuilder answer = new StringBuilder(); + + // String an Sparql-Endpoint schicken + HttpURLConnection connection; + SpecificSparqlEndpoint se = Configuration.getSparqlEndpoint(); + + connection = (HttpURLConnection) se.getURL().openConnection(); + connection.setDoOutput(true); + + connection.addRequestProperty("Host", se.getHost()); + connection.addRequestProperty("Connection","close"); + connection.addRequestProperty("Accept","text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); + connection.addRequestProperty("Accept-Language","de-de,de;q=0.8,en-us;q=0.5,en;q=0.3"); + connection.addRequestProperty("Accept-Charset","utf-8;q=1.0"); + connection.addRequestProperty("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8.1.4) Gecko/20070515 Firefox/2.0.0.4 Web-Sniffer/1.0.24"); + + OutputStream os = connection.getOutputStream(); + OutputStreamWriter osw = new OutputStreamWriter(os); + + Set<String> s =se.getParameters().keySet(); + Iterator<String> it=s.iterator(); + String FullURI=""; + while (it.hasNext()) { + String element = (String) it.next(); + FullURI+=""+URLEncoder.encode(element, "UTF-8")+"="+ + URLEncoder.encode(se.getParameters().get(element), "UTF-8")+"&"; + } + //System.out.println(FullURI); + FullURI+=""+se.getHasQueryParameter()+"="+URLEncoder.encode(sparql, "UTF-8"); + + + osw.write(FullURI); + osw.close(); + + // receive answer + InputStream is = connection.getInputStream(); + InputStreamReader isr = new InputStreamReader(is,"UTF-8"); + BufferedReader br = new BufferedReader(isr); + + String line; + do 
{ + line = br.readLine(); + if(line!=null) + answer.append(line); + } while (line != null); + + br.close(); + + return answer.toString(); + } + } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
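The net effect of revision 301 is that the HTTP plumbing formerly in SparqlHTTPRequest now lives as a private sendAndReceiveSPARQL method inside TypedSparqlQuery: the endpoint parameters and the SPARQL query are URL-encoded into a form body, POSTed to the endpoint, and the XML result is read back line by line. Below is a condensed, self-contained sketch of that request logic; the class name SparqlHttpSketch, the trimmed header set and the explicit method arguments are illustrative only, not part of the DL-Learner code.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.LinkedHashMap;
import java.util.Map;

public class SparqlHttpSketch {

    /** Sends a SPARQL query as a form-encoded POST body and returns the raw XML result. */
    static String sendAndReceiveSparql(URL endpoint, String host, Map<String, String> parameters,
            String queryParameter, String sparql) throws IOException {
        HttpURLConnection connection = (HttpURLConnection) endpoint.openConnection();
        connection.setDoOutput(true);
        connection.addRequestProperty("Host", host);
        connection.addRequestProperty("Connection", "close");
        connection.addRequestProperty("Accept", "text/xml,application/xml;q=0.9,*/*;q=0.5");

        // Build the form body: every endpoint parameter plus the query itself, URL-encoded.
        StringBuilder body = new StringBuilder();
        for (Map.Entry<String, String> entry : parameters.entrySet()) {
            body.append(URLEncoder.encode(entry.getKey(), "UTF-8")).append('=')
                .append(URLEncoder.encode(entry.getValue(), "UTF-8")).append('&');
        }
        body.append(queryParameter).append('=').append(URLEncoder.encode(sparql, "UTF-8"));

        OutputStreamWriter writer = new OutputStreamWriter(connection.getOutputStream());
        writer.write(body.toString());
        writer.close();

        // Read the SPARQL XML result line by line.
        StringBuilder answer = new StringBuilder();
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(connection.getInputStream(), "UTF-8"));
        String line;
        while ((line = reader.readLine()) != null) {
            answer.append(line);
        }
        reader.close();
        return answer.toString();
    }

    public static void main(String[] args) throws IOException {
        Map<String, String> params = new LinkedHashMap<String, String>();
        params.put("default-graph-uri", "http://dbpedia.org");
        params.put("format", "application/sparql-results.xml");
        String xml = sendAndReceiveSparql(new URL("http://dbpedia.openlinksw.com:8890/sparql"),
                "dbpedia.openlinksw.com", params, "query",
                "SELECT * WHERE {?a ?b ?c} LIMIT 10");
        System.out.println(xml);
    }
}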
From: <jen...@us...> - 2007-12-02 14:34:18
Revision: 302 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=302&view=rev Author: jenslehmann Date: 2007-12-02 06:34:12 -0800 (Sun, 02 Dec 2007) Log Message: ----------- more code cleanup Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedConfigurations.java trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/SpecificSparqlEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2007-12-02 14:34:12 UTC (rev 302) @@ -16,7 +16,7 @@ @Override public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { - Set<Tupel> s = tsq.query(this.URI); + Set<Tupel> s = tsq.query(this.uri); s = m.check(s, this); Vector<Node> Nodes = new Vector<Node>(); // Manipulation @@ -47,11 +47,11 @@ @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); - s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + "http://www.w3.org/2002/07/owl#Class" + ">."); for (PropertyNode one : properties) { - s.add("<" + this.URI + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); + s.add("<" + this.uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); s.addAll(one.getB().toNTriple()); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-02 14:34:12 UTC (rev 302) @@ -5,15 +5,15 @@ public class ExtractionAlgorithm { - private Configuration Configuration; - private Manipulator Manipulator; - private int recursiondepth = 2; + private Configuration configuration; + private Manipulator manipulator; + private int recursionDepth = 2; private boolean getAllBackground = true; public ExtractionAlgorithm(Configuration Configuration) { - this.Configuration = Configuration; - this.Manipulator = Configuration.getManipulator(); - this.recursiondepth=Configuration.getRecursiondepth(); + this.configuration = Configuration; + this.manipulator = Configuration.getManipulator(); + this.recursionDepth=Configuration.getRecursiondepth(); this.getAllBackground=Configuration.isGetAllBackground(); } @@ -37,14 +37,14 @@ System.out.println("StartVector: " + v); // n.expand(tsp, this.Manipulator); // Vector<Node> second= - for (int x = 1; x <= this.recursiondepth; x++) { + for (int x = 1; x <= this.recursionDepth; x++) { Vector<Node> tmp = new Vector<Node>(); while (v.size() > 0) { Node tmpNode = v.remove(0); System.out.println("Expanding " + tmpNode); //System.out.println(this.Manipulator); - Vector<Node> tmpVec = 
tmpNode.expand(tsp, this.Manipulator); + Vector<Node> tmpVec = tmpNode.expand(tsp, this.manipulator); tmp.addAll(tmpVec); } @@ -62,7 +62,7 @@ System.out.println(classes.size()); Node next=classes.remove(0); System.out.println(next); - classes.addAll(next.expand(tsp, this.Manipulator)); + classes.addAll(next.expand(tsp, this.manipulator)); } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2007-12-02 14:34:12 UTC (rev 302) @@ -21,7 +21,7 @@ @Override public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { - Set<Tupel> s = tsq.query(this.URI); + Set<Tupel> s = tsq.query(this.uri); // Manipulation m.check(s, this); //System.out.println("fffffff"+m); @@ -60,15 +60,15 @@ @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); - s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + "http://www.w3.org/2002/07/owl#Thing" + ">."); for (ClassNode one : classes) { - s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + one.getURI() + ">."); s.addAll(one.toNTriple()); } for (PropertyNode one : properties) { - s.add("<" + this.URI + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); + s.add("<" + this.uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); s.addAll(one.toNTriple()); s.addAll(one.getB().toNTriple()); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2007-12-02 14:34:12 UTC (rev 302) @@ -6,9 +6,9 @@ public class Manager { - private Configuration Configuration; - private TypedSparqlQuery TypedSparqlQuery; - private ExtractionAlgorithm ExtractionAlgorithm; + private Configuration configuration; + private TypedSparqlQuery typedSparqlQuery; + private ExtractionAlgorithm extractionAlgorithm; /*public void usePredefinedConfiguration(URI uri) { @@ -19,23 +19,23 @@ public void usePredefinedConfiguration(int i) { - this.Configuration = PredefinedConfigurations.get(i); - this.TypedSparqlQuery = new TypedSparqlQuery(Configuration); - this.ExtractionAlgorithm = new ExtractionAlgorithm(Configuration); + this.configuration = PredefinedConfigurations.get(i); + this.typedSparqlQuery = new TypedSparqlQuery(configuration); + this.extractionAlgorithm = new ExtractionAlgorithm(configuration); } public void useConfiguration(SparqlQueryType SparqlQueryType, SpecificSparqlEndpoint SparqlEndpoint, int recursiondepth,boolean getAllBackground) { - this.Configuration = new Configuration(SparqlEndpoint, SparqlQueryType,recursiondepth,getAllBackground); - this.TypedSparqlQuery = new TypedSparqlQuery(Configuration); - this.ExtractionAlgorithm = new ExtractionAlgorithm(Configuration); + this.configuration = new Configuration(SparqlEndpoint, SparqlQueryType,recursiondepth,getAllBackground); + this.typedSparqlQuery = new TypedSparqlQuery(configuration); + this.extractionAlgorithm = new ExtractionAlgorithm(configuration); } 
public String extract(URI uri) { // this.TypedSparqlQuery.query(uri); // System.out.println(ExtractionAlgorithm.getFirstNode(uri)); System.out.println("Start extracting"); - Node n = this.ExtractionAlgorithm.expandNode(uri, this.TypedSparqlQuery); + Node n = this.extractionAlgorithm.expandNode(uri, this.typedSparqlQuery); Set<String> s = n.toNTriple(); String nt = ""; for (String str : s) { @@ -52,7 +52,7 @@ for (String one : instances) { try { - Node n = this.ExtractionAlgorithm.expandNode(new URI(one), this.TypedSparqlQuery); + Node n = this.extractionAlgorithm.expandNode(new URI(one), this.typedSparqlQuery); ret.addAll(n.toNTriple()); } catch (Exception e) { e.printStackTrace(); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java 2007-12-02 14:34:12 UTC (rev 302) @@ -5,7 +5,7 @@ import java.util.Vector; public abstract class Node { - URI URI; + URI uri; protected String type; protected boolean expanded = false; @@ -14,7 +14,7 @@ // Hashtable<String,Node> datatype=new Hashtable<String,Node>();; public Node(URI u) { - this.URI = u; + this.uri = u; } @@ -31,7 +31,7 @@ @Override public String toString() { - return "Node: " + URI + ":" + type; + return "Node: " + uri + ":" + type; } @@ -48,7 +48,7 @@ } public URI getURI() { - return URI; + return uri; } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedConfigurations.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedConfigurations.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedConfigurations.java 2007-12-02 14:34:12 UTC (rev 302) @@ -7,53 +7,51 @@ public class PredefinedConfigurations { - public static Configuration get(int i){ - - switch (i){ - case 0: return dbpediaYago(); - + public static Configuration get(int i) { + + switch (i) { + case 0: + return dbpediaYago(); + + } + return null; } - return null; - } - - public static Configuration dbpediaYago(){ - URL u=null; - HashMap<String, String>m=new HashMap<String, String>(); - m.put("default-graph-uri","http://dbpedia.org"); - m.put("format","application/sparql-results.xml"); - try{ - u=new URL("http://dbpedia.openlinksw.com:8890/sparql"); - }catch (Exception e) {e.printStackTrace();} - SpecificSparqlEndpoint sse=new SpecificSparqlEndpoint( - u,"dbpedia.openlinksw.com",m); - //System.out.println(u); - Set<String>pred=new HashSet<String>(); - pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - - - Set<String>obj=new HashSet<String>(); - obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - obj.add("http://www.w3.org/2004/02/skos/core"); - - SparqlQueryType sqt=new 
SparqlQueryType("forbid",obj,pred,"false"); - - - - return new Configuration(sse,sqt,2,true); - - } - - + + public static Configuration dbpediaYago() { + URL u = null; + HashMap<String, String> m = new HashMap<String, String>(); + m.put("default-graph-uri", "http://dbpedia.org"); + m.put("format", "application/sparql-results.xml"); + try { + u = new URL("http://dbpedia.openlinksw.com:8890/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + SpecificSparqlEndpoint sse = new SpecificSparqlEndpoint(u, "dbpedia.openlinksw.com", m); + // System.out.println(u); + Set<String> pred = new HashSet<String>(); + pred.add("http://www.w3.org/2004/02/skos/core"); + pred.add("http://www.w3.org/2002/07/owl#sameAs"); + pred.add("http://xmlns.com/foaf/0.1/"); + pred.add("http://dbpedia.org/property/reference"); + pred.add("http://dbpedia.org/property/website"); + pred.add("http://dbpedia.org/property/wikipage"); + + Set<String> obj = new HashSet<String>(); + obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); + obj.add("http://dbpedia.org/resource/Category:Articles_"); + obj.add("http://xmlns.com/foaf/0.1/"); + obj.add("http://upload.wikimedia.org/wikipedia/commons"); + obj.add("http://upload.wikimedia.org/wikipedia"); + obj.add("http://www.geonames.org"); + obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); + obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); + obj.add("http://www.w3.org/2004/02/skos/core"); + + SparqlQueryType sqt = new SparqlQueryType("forbid", obj, pred, "false"); + + return new Configuration(sse, sqt, 2, true); + + } + } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2007-12-02 14:34:12 UTC (rev 302) @@ -10,7 +10,7 @@ private Node a; private Node b; - private Set<String> SpecialTypes; + private Set<String> specialTypes; public PropertyNode(URI u) { super(u); @@ -23,12 +23,12 @@ this.type = "property"; this.a = a; this.b = b; - this.SpecialTypes = new HashSet<String>(); + this.specialTypes = new HashSet<String>(); } @Override public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { - Set<Tupel> s = tsq.query(this.URI); + Set<Tupel> s = tsq.query(this.uri); Vector<Node> Nodes = new Vector<Node>(); // Manipulation @@ -37,7 +37,7 @@ Tupel t = (Tupel) it.next(); try { if (t.a.equals(m.type)) { - SpecialTypes.add(t.b); + specialTypes.add(t.b); } } catch (Exception e) { System.out.println(t); @@ -60,10 +60,10 @@ @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); - s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + "http://www.w3.org/2002/07/owl#ObjectProperty" + ">."); - for (String one : SpecialTypes) { - s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + for (String one : specialTypes) { + s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + one + ">."); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2007-12-02 14:19:48 UTC (rev 301) +++ 
trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2007-12-02 14:34:12 UTC (rev 302) @@ -2,22 +2,22 @@ public class SparqlQueryMaker { - private SparqlQueryType SparqlQueryType; + private SparqlQueryType sparqlQueryType; public SparqlQueryMaker(SparqlQueryType SparqlQueryType) { - this.SparqlQueryType = SparqlQueryType; + this.sparqlQueryType = SparqlQueryType; } public String makeQueryUsingFilters(String subject) { String lineend = "\n"; String Filter = ""; - if (!this.SparqlQueryType.isLiterals()) + if (!this.sparqlQueryType.isLiterals()) Filter += "!isLiteral(?object))"; - for (String p : this.SparqlQueryType.getPredicatefilterlist()) { + for (String p : this.sparqlQueryType.getPredicatefilterlist()) { Filter += lineend + filterPredicate(p); } - for (String o : this.SparqlQueryType.getObjectfilterlist()) { + for (String o : this.sparqlQueryType.getObjectfilterlist()) { Filter += lineend + filterObject(o); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SpecificSparqlEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SpecificSparqlEndpoint.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SpecificSparqlEndpoint.java 2007-12-02 14:34:12 UTC (rev 302) @@ -9,14 +9,14 @@ String host; String hasQueryParameter; - URL URL; + URL url; public HashMap<String, String> parameters = new HashMap<String, String>(); public SpecificSparqlEndpoint(URL url,String host, HashMap<String, String> parameters) { super(); this.host=host; - this.URL = url; + this.url = url; this.hasQueryParameter = "query"; this.parameters = parameters; } @@ -47,7 +47,7 @@ } public URL getURL() { - return this.URL; + return this.url; } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2007-12-02 14:19:48 UTC (rev 301) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2007-12-02 14:34:12 UTC (rev 302) @@ -14,22 +14,22 @@ import java.util.Set; public class TypedSparqlQuery { - private Configuration Configuration; + private Configuration configuration; // private SparqlHTTPRequest SparqlHTTPRequest; - private SparqlQueryMaker SparqlQueryMaker; + private SparqlQueryMaker sparqlQueryMaker; Cache Cache; public TypedSparqlQuery(Configuration Configuration) { - this.Configuration = Configuration; + this.configuration = Configuration; // this.SparqlHTTPRequest = new SparqlHTTPRequest(Configuration.getSparqlEndpoint()); - this.SparqlQueryMaker = new SparqlQueryMaker(Configuration.getSparqlQueryType()); + this.sparqlQueryMaker = new SparqlQueryMaker(Configuration.getSparqlQueryType()); this.Cache = new Cache("cache"); } public Set<Tupel> query(URI u) { // getQuery - String sparql = SparqlQueryMaker.makeQueryUsingFilters(u.toString()); + String sparql = sparqlQueryMaker.makeQueryUsingFilters(u.toString()); // check cache String FromCache = this.Cache.get(u.toString(), sparql); @@ -96,7 +96,7 @@ // String an Sparql-Endpoint schicken HttpURLConnection connection; - SpecificSparqlEndpoint se = Configuration.getSparqlEndpoint(); + SpecificSparqlEndpoint se = configuration.getSparqlEndpoint(); connection = (HttpURLConnection) se.getURL().openConnection(); connection.setDoOutput(true); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
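Beyond the lower-casing of field names, the classes touched in revision 302 make up the public face of the fragment extractor: a Manager is configured (either with a predefined configuration or an explicit one) and Manager.extract(URI) returns the collected N-Triples as a string. A minimal usage sketch follows, assuming the Manager keeps an implicit no-argument constructor and that predefined configuration 0 is still the dbpediaYago() setup shown above.

import java.net.URI;

import org.dllearner.kb.sparql.Manager;

public class ManagerUsageSketch {
    public static void main(String[] args) throws Exception {
        Manager manager = new Manager();
        // Predefined configuration 0 wires up the DBpedia/YAGO endpoint
        // (http://dbpedia.openlinksw.com:8890/sparql) with recursion depth 2.
        manager.usePredefinedConfiguration(0);

        // Extract an N-Triples fragment around a single start resource.
        String nTriples = manager.extract(new URI("http://dbpedia.org/resource/Angela_Merkel"));
        System.out.println(nTriples);
    }
}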
From: <jen...@us...> - 2007-12-02 14:41:40
Revision: 303 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=303&view=rev Author: jenslehmann Date: 2007-12-02 06:41:38 -0800 (Sun, 02 Dec 2007) Log Message: ----------- Fixed more code style problems. (Do not use this.object when object is sufficient.) Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/Tupel.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-02 14:41:38 UTC (rev 303) @@ -37,14 +37,14 @@ System.out.println("StartVector: " + v); // n.expand(tsp, this.Manipulator); // Vector<Node> second= - for (int x = 1; x <= this.recursionDepth; x++) { + for (int x = 1; x <= recursionDepth; x++) { Vector<Node> tmp = new Vector<Node>(); while (v.size() > 0) { Node tmpNode = v.remove(0); System.out.println("Expanding " + tmpNode); //System.out.println(this.Manipulator); - Vector<Node> tmpVec = tmpNode.expand(tsp, this.manipulator); + Vector<Node> tmpVec = tmpNode.expand(tsp, manipulator); tmp.addAll(tmpVec); } @@ -62,7 +62,7 @@ System.out.println(classes.size()); Node next=classes.remove(0); System.out.println(next); - classes.addAll(next.expand(tsp, this.manipulator)); + classes.addAll(next.expand(tsp, manipulator)); } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2007-12-02 14:41:38 UTC (rev 303) @@ -21,7 +21,7 @@ @Override public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { - Set<Tupel> s = tsq.query(this.uri); + Set<Tupel> s = tsq.query(uri); // Manipulation m.check(s, this); //System.out.println("fffffff"+m); @@ -48,7 +48,7 @@ } } - this.expanded = true; + expanded = true; return Nodes; } @@ -60,15 +60,15 @@ @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); - s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + "http://www.w3.org/2002/07/owl#Thing" + ">."); for (ClassNode one : classes) { - s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + one.getURI() + ">."); s.addAll(one.toNTriple()); } for (PropertyNode one : properties) { - s.add("<" + this.uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); + s.add("<" + uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); s.addAll(one.toNTriple()); s.addAll(one.getB().toNTriple()); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- 
trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2007-12-02 14:41:38 UTC (rev 303) @@ -35,7 +35,7 @@ // this.TypedSparqlQuery.query(uri); // System.out.println(ExtractionAlgorithm.getFirstNode(uri)); System.out.println("Start extracting"); - Node n = this.extractionAlgorithm.expandNode(uri, this.typedSparqlQuery); + Node n = extractionAlgorithm.expandNode(uri, typedSparqlQuery); Set<String> s = n.toNTriple(); String nt = ""; for (String str : s) { @@ -52,7 +52,7 @@ for (String one : instances) { try { - Node n = this.extractionAlgorithm.expandNode(new URI(one), this.typedSparqlQuery); + Node n = extractionAlgorithm.expandNode(new URI(one), typedSparqlQuery); ret.addAll(n.toNTriple()); } catch (Exception e) { e.printStackTrace(); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2007-12-02 14:41:38 UTC (rev 303) @@ -34,21 +34,20 @@ while (it.hasNext()) { Tupel t = (Tupel) it.next(); // all classes with owl:type class - if (t.a.equals(this.type) && t.b.equals(this.classns) && node.isClass()) { + if (t.a.equals(type) && t.b.equals(classns) && node.isClass()) { toRemove.add(t); } - ; + // all with type class - if (t.b.equals(this.classns) && node.isClass()) { + if (t.b.equals(classns) && node.isClass()) { toRemove.add(t); } - ; + // all instances with owl:type thing - if (t.a.equals(this.type) && t.b.equals(this.thing) && node.isInstance()) { + if (t.a.equals(type) && t.b.equals(thing) && node.isInstance()) { toRemove.add(t); } - ; - + } s.removeAll(toRemove); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2007-12-02 14:41:38 UTC (rev 303) @@ -28,7 +28,7 @@ @Override public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { - Set<Tupel> s = tsq.query(this.uri); + Set<Tupel> s = tsq.query(uri); Vector<Node> Nodes = new Vector<Node>(); // Manipulation @@ -54,16 +54,16 @@ } public Node getB() { - return this.b; + return b; } @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); - s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + "http://www.w3.org/2002/07/owl#ObjectProperty" + ">."); for (String one : specialTypes) { - s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + one + ">."); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2007-12-02 14:41:38 UTC (rev 303) @@ -14,10 +14,10 @@ String Filter = ""; if (!this.sparqlQueryType.isLiterals()) Filter += "!isLiteral(?object))"; - for (String p : this.sparqlQueryType.getPredicatefilterlist()) { 
+ for (String p : sparqlQueryType.getPredicatefilterlist()) { Filter += lineend + filterPredicate(p); } - for (String o : this.sparqlQueryType.getObjectfilterlist()) { + for (String o : sparqlQueryType.getObjectfilterlist()) { Filter += lineend + filterObject(o); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Tupel.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Tupel.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Tupel.java 2007-12-02 14:41:38 UTC (rev 303) @@ -16,7 +16,7 @@ } public boolean equals(Tupel t) { - if (this.a.equals(t.a) && this.b.equals(t.b)) + if (a.equals(t.a) && b.equals(t.b)) return true; else return false; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2007-12-02 14:34:12 UTC (rev 302) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2007-12-02 14:41:38 UTC (rev 303) @@ -17,13 +17,13 @@ private Configuration configuration; // private SparqlHTTPRequest SparqlHTTPRequest; private SparqlQueryMaker sparqlQueryMaker; - Cache Cache; + Cache cache; public TypedSparqlQuery(Configuration Configuration) { this.configuration = Configuration; // this.SparqlHTTPRequest = new SparqlHTTPRequest(Configuration.getSparqlEndpoint()); this.sparqlQueryMaker = new SparqlQueryMaker(Configuration.getSparqlQueryType()); - this.Cache = new Cache("cache"); + this.cache = new Cache("cache"); } public Set<Tupel> query(URI u) { @@ -32,7 +32,7 @@ String sparql = sparqlQueryMaker.makeQueryUsingFilters(u.toString()); // check cache - String FromCache = this.Cache.get(u.toString(), sparql); + String FromCache = cache.get(u.toString(), sparql); String xml = null; // if not in cache get it from EndPoint @@ -45,7 +45,7 @@ } //System.out.println(sparql); //System.out.println(xml); - this.Cache.put(u.toString(), xml, sparql); + cache.put(u.toString(), xml, sparql); System.out.print("\n"); } else { xml = FromCache; @@ -54,7 +54,7 @@ // System.out.println(xml); // process XML - Set<Tupel> s = this.processResult(xml); + Set<Tupel> s = processResult(xml); try { System.out.println("retrieved " + s.size() + " tupels"); } catch (Exception e) { @@ -115,7 +115,7 @@ Iterator<String> it=s.iterator(); String FullURI=""; while (it.hasNext()) { - String element = (String) it.next(); + String element = it.next(); FullURI+=""+URLEncoder.encode(element, "UTF-8")+"="+ URLEncoder.encode(se.getParameters().get(element), "UTF-8")+"&"; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
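The rule behind revision 303 is easiest to see on Tupel: "this." stays where it disambiguates a field from a constructor parameter and is dropped wherever the plain name is unambiguous. A simplified reconstruction (the field visibility and constructor are not shown in the diff, so they are assumptions here):

public class Tupel {
    public final String a;
    public final String b;

    public Tupel(String a, String b) {
        this.a = a; // "this." is still needed: the parameter shadows the field
        this.b = b;
    }

    public boolean equals(Tupel t) {
        // After revision 303: no "this." prefix where the field name alone is unambiguous.
        return a.equals(t.a) && b.equals(t.b);
    }
}

Note that equals(Tupel) overloads rather than overrides Object.equals(Object), so hash-based collections will not pick it up.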
From: <jen...@us...> - 2007-12-02 14:45:05
Revision: 304 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=304&view=rev Author: jenslehmann Date: 2007-12-02 06:44:58 -0800 (Sun, 02 Dec 2007) Log Message: ----------- removed isXXX methods (can be checked using instanceof) Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2007-12-02 14:41:38 UTC (rev 303) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2007-12-02 14:44:58 UTC (rev 304) @@ -40,11 +40,6 @@ } @Override - public boolean isClass() { - return true; - } - - @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-02 14:41:38 UTC (rev 303) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-02 14:44:58 UTC (rev 304) @@ -54,7 +54,7 @@ if (this.getAllBackground) { Vector<Node> classes = new Vector<Node>(); for (Node one : v) { - if (one.isClass()) { + if (one instanceof ClassNode) { classes.add(one); } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2007-12-02 14:41:38 UTC (rev 303) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2007-12-02 14:44:58 UTC (rev 304) @@ -53,11 +53,6 @@ } @Override - public boolean isInstance() { - return true; - } - - @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2007-12-02 14:41:38 UTC (rev 303) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2007-12-02 14:44:58 UTC (rev 304) @@ -34,17 +34,17 @@ while (it.hasNext()) { Tupel t = (Tupel) it.next(); // all classes with owl:type class - if (t.a.equals(type) && t.b.equals(classns) && node.isClass()) { + if (t.a.equals(type) && t.b.equals(classns) && node instanceof ClassNode) { toRemove.add(t); } // all with type class - if (t.b.equals(classns) && node.isClass()) { + if (t.b.equals(classns) && node instanceof ClassNode) { toRemove.add(t); } // all instances with owl:type thing - if (t.a.equals(type) && t.b.equals(thing) && node.isInstance()) { + if (t.a.equals(type) && t.b.equals(thing) && node instanceof InstanceNode) { toRemove.add(t); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java 2007-12-02 14:41:38 UTC (rev 
303) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java 2007-12-02 14:44:58 UTC (rev 304) @@ -35,18 +35,6 @@ } - public boolean isClass() { - return false; - } - - public boolean isInstance() { - return false; - } - - public boolean isProperty() { - return false; - } - public URI getURI() { return uri; } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2007-12-02 14:41:38 UTC (rev 303) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2007-12-02 14:44:58 UTC (rev 304) @@ -48,11 +48,6 @@ return Nodes; } - @Override - public boolean isProperty() { - return true; - } - public Node getB() { return b; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
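With the isClass()/isInstance()/isProperty() flags removed in revision 304, callers such as ExtractionAlgorithm and Manipulator test the node type directly with instanceof, as the hunks above show. A tiny runnable illustration of the pattern; the Node classes below are bare stand-ins for the real ones in org.dllearner.kb.sparql.

import java.util.ArrayList;
import java.util.List;

public class InstanceofSketch {

    // Minimal stand-ins: the real classes carry URIs, expansion logic and N-Triple output.
    static abstract class Node { }
    static class ClassNode extends Node { }
    static class InstanceNode extends Node { }

    public static void main(String[] args) {
        List<Node> nodes = new ArrayList<Node>();
        nodes.add(new ClassNode());
        nodes.add(new InstanceNode());

        // Before revision 304 this read "if (one.isClass())"; the flag methods are gone,
        // so the subtype check is expressed directly.
        List<Node> classes = new ArrayList<Node>();
        for (Node one : nodes) {
            if (one instanceof ClassNode) {
                classes.add(one);
            }
        }
        System.out.println(classes.size() + " class node(s)");
    }
}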
From: <ku...@us...> - 2007-12-05 12:24:18
Revision: 324 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=324&view=rev Author: kurzum Date: 2007-12-05 04:24:13 -0800 (Wed, 05 Dec 2007) Log Message: ----------- added a break for skos after 500 classes, because they are cyclic Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-04 18:02:06 UTC (rev 323) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-05 12:24:13 UTC (rev 324) @@ -94,6 +94,7 @@ Node next = classes.remove(0); System.out.println("Expanding: " + next); classes.addAll(next.expand(tsp, manipulator)); + if (classes.size()>=500){break;} } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java 2007-12-04 18:02:06 UTC (rev 323) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java 2007-12-05 12:24:13 UTC (rev 324) @@ -31,6 +31,10 @@ return dbpediaEndpoint(); case 2: return localJoseki(); + case 3: + return worldFactBook(); + case 4: + return govTrack(); } return null; } @@ -60,4 +64,29 @@ } return new SpecificSparqlEndpoint(u, "localost", m); } + public static SpecificSparqlEndpoint worldFactBook() { + URL u = null; + HashMap<String, String> m = new HashMap<String, String>(); + // m.put("default-graph-uri", "http://dbpedia.org"); + // m.put("format", "application/sparql-results.xml"); + try { + u = new URL("http://www4.wiwiss.fu-berlin.de/factbook/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SpecificSparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); + } + public static SpecificSparqlEndpoint govTrack() { + URL u = null; + HashMap<String, String> m = new HashMap<String, String>(); + // m.put("default-graph-uri", "http://dbpedia.org"); + // m.put("format", "application/sparql-results.xml"); + try { + u = new URL("http://www.rdfabout.com/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SpecificSparqlEndpoint(u, "www.rdfabout.com", m); + } + } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java 2007-12-04 18:02:06 UTC (rev 323) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java 2007-12-05 12:24:13 UTC (rev 324) @@ -31,7 +31,7 @@ case 1: return YagoFilter(); case 2: - return CategoriesOnly(); + return SKOS(); } return null; } @@ -64,15 +64,16 @@ return new SparqlQueryType("forbid", obj, pred, "false"); } - public static SparqlQueryType CategoriesOnly(){ + public static SparqlQueryType SKOS(){ Set<String> pred = new HashSet<String>(); - pred.add("http://www.w3.org/2004/02/skos/core"); + //pred.add("http://www.w3.org/2004/02/skos/core"); pred.add("http://www.w3.org/2002/07/owl#sameAs"); pred.add("http://xmlns.com/foaf/0.1/"); pred.add("http://dbpedia.org/property/reference"); pred.add("http://dbpedia.org/property/website"); pred.add("http://dbpedia.org/property/wikipage"); + 
pred.add("http://www.w3.org/2004/02/skos/core#narrower"); pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); Set<String> obj = new HashSet<String>(); @@ -85,8 +86,8 @@ obj.add("http://www.geonames.org"); obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - obj.add("http://www.w3.org/2004/02/skos/core"); + obj.add("http://dbpedia.org/class/yago"); obj.add("http://dbpedia.org/resource/Template"); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2008-01-10 09:08:07
Revision: 360 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=360&view=rev Author: kurzum Date: 2008-01-10 01:08:04 -0800 (Thu, 10 Jan 2008) Log Message: ----------- latest changes forgot to commit Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2008-01-10 09:07:30 UTC (rev 359) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2008-01-10 09:08:04 UTC (rev 360) @@ -104,7 +104,7 @@ return s; } - @Override + public int compareTo(Node n){ return super.compareTo(n); // Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java 2008-01-10 09:07:30 UTC (rev 359) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java 2008-01-10 09:08:04 UTC (rev 360) @@ -37,23 +37,27 @@ // the following needs to be moved to // class extraction algorithm or manipulator private int recursiondepth = 2; - private boolean getAllBackground = true; + private boolean getAllSuperClasses = true; + private boolean closeAfterRecursion = true; + public int numberOfUncachedSparqlQueries=0; + public int numberOfCachedSparqlQueries=0; public Configuration(SpecificSparqlEndpoint specificSparqlEndpoint, SparqlQueryType sparqlQueryType, Manipulator manipulator, int recursiondepth, - boolean getAllBackground) { + boolean getAllSuperClasses, boolean closeAfterRecursion) { this.specificSparqlEndpoint = specificSparqlEndpoint; this.sparqlQueryType = sparqlQueryType; this.manipulator = manipulator; this.recursiondepth = recursiondepth; - this.getAllBackground = getAllBackground; + this.getAllSuperClasses = getAllSuperClasses; + this.closeAfterRecursion=closeAfterRecursion; } public Configuration changeQueryType(SparqlQueryType sqt) { // TODO must clone here return new Configuration(this.specificSparqlEndpoint, sqt, this.manipulator, - this.recursiondepth, this.getAllBackground); + this.recursiondepth, this.getAllSuperClasses,this.closeAfterRecursion); } @@ -69,12 +73,22 @@ return sparqlQueryType; } - public boolean isGetAllBackground() { - return getAllBackground; + public boolean isGetAllSuperClasses() { + return getAllSuperClasses; } + public boolean isCloseAfterRecursion() { + return closeAfterRecursion; + } public int getRecursiondepth() { return recursiondepth; } + + public void increaseNumberOfuncachedSparqlQueries(){ + numberOfUncachedSparqlQueries++; + } + public void increaseNumberOfCachedSparqlQueries(){ + numberOfCachedSparqlQueries++; + } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2008-01-10 09:07:30 UTC (rev 359) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2008-01-10 09:08:04 UTC (rev 360) @@ -32,17 +32,17 @@ private Configuration configuration; private Manipulator 
manipulator; - private int recursionDepth = 2; - private boolean getAllBackground = true; - private boolean closeAfterRecursion = true; + private int recursionDepth = 1; + //private boolean getAllSuperClasses = true; + //private boolean closeAfterRecursion = true; private boolean print_flag=false; public ExtractionAlgorithm(Configuration Configuration) { this.configuration = Configuration; this.manipulator = Configuration.getManipulator(); this.recursionDepth = Configuration.getRecursiondepth(); - this.getAllBackground = Configuration.isGetAllBackground(); - + //this.getAllSuperClasses = Configuration.isGetAllSuperClasses(); + //this.closeAfterRecursion=Configuration.isCloseAfterRecursion(); } public Node getFirstNode(URI u) { @@ -87,13 +87,13 @@ v = tmp; System.out.println("Recursion counter: " + x + " with " + v.size() + " Nodes remaining, needed: " - +(System.currentTimeMillis()-time)); + +(System.currentTimeMillis()-time)+"ms"); time=System.currentTimeMillis(); } HashSet<String> hadAlready=new HashSet<String>(); // gets All Class Nodes and expands them further - if (this.getAllBackground) { + if (this.configuration.isGetAllSuperClasses()) { //Set<Node> classes = new TreeSet<Node>(); Vector<Node> classes = new Vector<Node>(); @@ -107,9 +107,9 @@ } } - System.out.println(instances.size()); + //System.out.println(instances.size()); TypedSparqlQueryClasses tsqc=new TypedSparqlQueryClasses(configuration); - if(closeAfterRecursion){ + if(this.configuration.isCloseAfterRecursion()){ while (instances.size() > 0) { p("Getting classes for remaining instances: " + instances.size()); Node next = instances.remove(0); @@ -134,7 +134,7 @@ tmp=next.expand(tsp, manipulator); classes.addAll(tmp); tmp=new Vector<Node>(); - if(i % 50==0)System.out.println("got "+i+" extra classes, max: "+manipulator.breakSuperClassRetrievalAfter); + //if(i % 50==0)System.out.println("got "+i+" extra classes, max: "+manipulator.breakSuperClassRetrievalAfter); i++; if (i>=manipulator.breakSuperClassRetrievalAfter){break;} } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-10 09:07:30 UTC (rev 359) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-10 09:08:04 UTC (rev 360) @@ -41,10 +41,10 @@ public void useConfiguration(SparqlQueryType SparqlQueryType, SpecificSparqlEndpoint SparqlEndpoint, Manipulator manipulator, int recursiondepth, - boolean getAllBackground) { + boolean getAllSuperClasses,boolean closeAfterRecursion) { this.configuration = new Configuration(SparqlEndpoint, SparqlQueryType, manipulator, - recursiondepth, getAllBackground); + recursiondepth, getAllSuperClasses, closeAfterRecursion); this.typedSparqlQuery = new TypedSparqlQuery(configuration); this.extractionAlgorithm = new ExtractionAlgorithm(configuration); @@ -117,6 +117,7 @@ nt.append((String) arr[i]+"\n"); if(i%1000==0)System.out.println(i+" of "+arr.length+" triples done"); } + System.out.println(arr.length+" of "+arr.length+" triples done"); /* String tmp=""; while ( ret.size() > 0) { @@ -136,5 +137,9 @@ this.configuration.getSparqlQueryType().addPredicateFilter(str); } + + public Configuration getConfiguration(){ + return configuration; + } } \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- 
trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-10 09:07:30 UTC (rev 359) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-10 09:08:04 UTC (rev 360) @@ -97,6 +97,7 @@ String xml = null; // if not in cache get it from EndPoint if (FromCache == null) { + configuration.increaseNumberOfuncachedSparqlQueries(); try { xml = sendAndReceiveSPARQL(sparql); } catch (IOException e) { @@ -110,6 +111,7 @@ } //System.out.print("\n"); } else { + configuration.increaseNumberOfCachedSparqlQueries(); xml = FromCache; //System.out.println("FROM CACHE"); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-10 09:07:30 UTC (rev 359) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-10 09:08:04 UTC (rev 360) @@ -81,6 +81,7 @@ String xml = null; // if not in cache get it from EndPoint if (FromCache == null) { + configuration.increaseNumberOfuncachedSparqlQueries(); try { xml = sendAndReceiveSPARQL(sparql); } catch (IOException e) { @@ -94,6 +95,7 @@ } //System.out.print("\n"); } else { + configuration.increaseNumberOfCachedSparqlQueries(); xml = FromCache; //System.out.println("FROM CACHE"); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
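Revision 360 threads two of the new Configuration flags through the extraction: closeAfterRecursion (when the recursion depth is reached, still fetch the classes of the remaining instances via TypedSparqlQueryClasses) and getAllSuperClasses (then walk up the class hierarchy, capped by the manipulator's breakSuperClassRetrievalAfter so cyclic hierarchies terminate). The outline below is a rough, self-contained approximation of that control flow, not the actual ExtractionAlgorithm code; Node is a stand-in whose expand() replaces the SPARQL-backed expansion, and the cap of 200 in main mirrors the breakSuperClassRetrievalAfter default visible in the SparqlEndpoint listing under revision 384 further down.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ExtractionFlowSketch {

    // Stand-in node: isClass marks class nodes, expand() returns canned neighbours.
    static class Node {
        final boolean isClass;
        final List<Node> neighbours = new ArrayList<Node>();
        Node(boolean isClass) { this.isClass = isClass; }
        List<Node> expand() { return neighbours; }
    }

    static void extract(Node start, int recursionDepth, boolean closeAfterRecursion,
            boolean getAllSuperClasses, int breakSuperClassRetrievalAfter) {
        // Phase 1: breadth-first expansion of the start node up to recursionDepth.
        List<Node> frontier = new ArrayList<Node>(Arrays.asList(start));
        for (int depth = 1; depth <= recursionDepth; depth++) {
            List<Node> next = new ArrayList<Node>();
            for (Node n : frontier) {
                next.addAll(n.expand());
            }
            frontier = next;
        }

        if (getAllSuperClasses) {
            List<Node> classes = new ArrayList<Node>();
            List<Node> instances = new ArrayList<Node>();
            for (Node n : frontier) {
                (n.isClass ? classes : instances).add(n);
            }
            // closeAfterRecursion: also fetch the classes of the instances left on the
            // frontier (the real code uses TypedSparqlQueryClasses for this step).
            if (closeAfterRecursion) {
                while (!instances.isEmpty()) {
                    classes.addAll(instances.remove(0).expand());
                }
            }
            // Walk up the class hierarchy, but stop after a fixed number of classes.
            int i = 0;
            while (!classes.isEmpty()) {
                classes.addAll(classes.remove(0).expand());
                if (++i >= breakSuperClassRetrievalAfter) {
                    break;
                }
            }
        }
    }

    public static void main(String[] args) {
        Node instance = new Node(false);
        Node type = new Node(true);
        instance.neighbours.add(type);
        extract(instance, 1, true, true, 200);
        System.out.println("extraction outline finished");
    }
}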
From: <ku...@us...> - 2008-01-18 08:14:42
Revision: 384 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=384&view=rev Author: kurzum Date: 2008-01-18 00:14:37 -0800 (Fri, 18 Jan 2008) Log Message: ----------- changed name Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java 2008-01-18 07:49:29 UTC (rev 383) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java 2008-01-18 08:14:37 UTC (rev 384) @@ -35,10 +35,17 @@ * one additional function and would just be a data class * * it writes the files according to one resource in the basedir and saves the - * cache object in it. + * cache object in it. Filename is the subject, a resource + * e.g. http://dbpedia.org/resource/Angela_Merkel which is first urlencoded + * and so serves as the hash for the filename. * + * the cache object in the file remembers: a timestamp, + * a hashmap SparqlQuery -> SparqlXMLResult + * Cache validates if timestamp too old and Sparql-Query the same + * before returning the SPARQL xml-result + * * @author Sebastian Hellmann - * + * @author Sebastian Knappe */ public class Cache implements Serializable { @@ -63,9 +70,9 @@ // /** * constructor for the cache itself - * + * Called once at the startup * @param path - * where the cache files will be + * Where the base path to the cache is */ public Cache(String path) { this.basedir = path + File.separator; @@ -76,32 +83,33 @@ } } - // constructor for single cache object(one entry) + /** - * @param sparql + * constructor for single cache object(one entry) + * @param sparqlQuery * query * @param content * that is the sparql query result as xml */ - protected Cache(String sparql, String content) { + protected Cache(String sparqlQuery, String content) { // this.content = c; // this.sparqlquery = sparql; this.timestamp = System.currentTimeMillis(); this.hm = new HashMap<String, String>(); - hm.put(sparql, content); + hm.put(sparqlQuery, content); } /** - * gets a chached sparqlquery for a resource(key) and returns the + * gets a chached sparqlQuery for a resource(key) and returns the * sparqlXMLResult or null, if none is found. * * @param key * is the resource, the identifier - * @param sparqlquery + * @param sparqlQuery * is a special sparql query about that resource * @return sparqlXMLResult */ - public String get(String key, String sparqlquery) { + public String get(String key, String sparqlQuery) { // System.out.println("get From "+key); String ret = null; try { @@ -114,7 +122,7 @@ // System.out.println("fresh"); String xml = ""; try { - xml = c.hm.get(sparqlquery); + xml = c.hm.get(sparqlQuery); } catch (Exception e) { return null; } Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java 2008-01-18 07:49:29 UTC (rev 383) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java 2008-01-18 08:14:37 UTC (rev 384) @@ -1,503 +0,0 @@ -/** - * Copyright (C) 2007, Jens Lehmann - * - * This file is part of DL-Learner. 
- * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.Set; - -import org.dllearner.core.KnowledgeSource; -import org.dllearner.core.OntologyFormat; -import org.dllearner.core.OntologyFormatUnsupportedException; -import org.dllearner.core.config.BooleanConfigOption; -import org.dllearner.core.config.ConfigEntry; -import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.IntegerConfigOption; -import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.config.StringConfigOption; -import org.dllearner.core.config.StringSetConfigOption; -import org.dllearner.core.config.StringTupleListConfigOption; -import org.dllearner.core.dl.KB; -import org.dllearner.parser.KBParser; -import org.dllearner.reasoning.DIGConverter; -import org.dllearner.reasoning.JenaOWLDIGConverter; -import org.dllearner.utilities.StringTuple; - -/** - * Represents a SPARQL Endpoint. 
- * - * @author Jens Lehmann - * @author Sebastian Knappe - * @author Sebastian Hellmann - */ -public class SparqlEndpoint extends KnowledgeSource { - - // ConfigOptions - private URL url; - String host; - private Set<String> instances=new HashSet<String>();; - private URL dumpFile; - private int recursionDepth = 1; - private int predefinedFilter = 0; - private int predefinedEndpoint = 0; - private Set<String> predList=new HashSet<String>(); - private Set<String> objList=new HashSet<String>(); - // private Set<String> classList; - private String format = "N-TRIPLES"; - private boolean dumpToFile = true; - private boolean useLits = false; - private boolean getAllSuperClasses = true; - private boolean closeAfterRecursion = true; - private int breakSuperClassRetrievalAfter = 200; - - private boolean learnDomain = false; - private boolean learnRange = false; - private int numberOfInstancesUsedForRoleLearning=40; - private String role=""; - private String blankNodeIdentifier = "bnode"; - - LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); - - /** - * Holds the results of the calculateSubjects method - */ - private String[] subjects; - - /** - * Holds the results of the calculateTriples method - */ - private String[] triples; - - /** - * Holds the results of the calculateConceptSubjects method - */ - private String[] conceptSubjects; - - /** - * if a method is running this becomes true - */ - private boolean subjectThreadRunning = false; - - private boolean triplesThreadRunning = false; - - private boolean conceptThreadRunning = false; - - /** - * the Thread that is running a method - */ - private Thread subjectThread; - - private Thread triplesThread; - - private Thread conceptThread; - - // received ontology as array, used if format=Array(an element of the - // array consists of the subject, predicate and object separated by '<' - private String[] ontArray; - - // received ontology as KB, the internal format - private KB kb; - - public static String getName() { - return "SPARQL Endpoint Restructured"; - } - - /** - * sets the ConfigOptions for this KnowledgeSource - * - * @return - */ - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); - options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); - options.add(new StringSetConfigOption("instances", - "relevant instances e.g. 
positive and negative examples in a learning problem")); - options.add(new IntegerConfigOption("recursionDepth", - "recursion depth of KB fragment selection", 2)); - options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); - options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); - - options.add(new StringSetConfigOption("predList", "list of all ignored roles")); - options.add(new StringSetConfigOption("objList", "list of all ignored objects")); - options.add(new StringSetConfigOption("classList", "list of all ignored classes")); - options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); - options.add(new BooleanConfigOption("dumpToFile", - "Specifies whether the extracted ontology is written to a file or not.", true)); - options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); - options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); - - options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); - options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); - options.add(new StringConfigOption("role", "role to learn Domain/Range from")); - options.add(new StringConfigOption("blankNodeIdentifier", - "used to identify blanknodes in Tripels")); - - options.add(new StringTupleListConfigOption("example", "example")); - options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); - options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); - options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); - options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); - options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); - - - - return options; - } - - /* - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - @SuppressWarnings( { "unchecked" }) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String option = entry.getOptionName(); - if (option.equals("url")) { - String s = (String) entry.getValue(); - try { - url = new URL(s); - } catch (MalformedURLException e) { - throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), - "malformed URL " + s); - } - } else if (option.equals("host")) { - host = (String) entry.getValue(); - } else if (option.equals("instances")) { - instances = (Set<String>) entry.getValue(); - } else if (option.equals("recursionDepth")) { - recursionDepth = (Integer) entry.getValue(); - } else if (option.equals("predList")) { - predList = (Set<String>) entry.getValue(); - } else if (option.equals("objList")) { - objList = (Set<String>) entry.getValue(); - //} else if (option.equals("classList")) { - // classList = (Set<String>) entry.getValue(); - } else if (option.equals("predefinedEndpoint")) { - predefinedEndpoint = (Integer) entry.getValue(); - } else if (option.equals("predefinedFilter")) { - predefinedFilter = (Integer) entry.getValue(); - } else if (option.equals("format")) { - format = (String) entry.getValue(); - } else if (option.equals("dumpToFile")) { - dumpToFile = (Boolean) entry.getValue(); - } else if (option.equals("useLits")) { - useLits = (Boolean) 
entry.getValue(); - } else if (option.equals("getAllSuperClasses")) { - getAllSuperClasses = (Boolean) entry.getValue(); - } else if (option.equals("learnDomain")) { - learnDomain = (Boolean) entry.getValue(); - }else if (option.equals("learnRange")) { - learnRange = (Boolean) entry.getValue(); - } else if (option.equals("role")) { - role = (String) entry.getValue(); - } else if (option.equals("blankNodeIdentifier")) { - blankNodeIdentifier = (String) entry.getValue(); - } else if (option.equals("example")) { - //System.out.println(entry.getValue()); - }else if (option.equals("replacePredicate")) { - replacePredicate = (LinkedList)entry.getValue(); - }else if (option.equals("replaceObject")) { - replaceObject = (LinkedList)entry.getValue(); - }else if (option.equals("breakSuperClassRetrievalAfter")) { - breakSuperClassRetrievalAfter = (Integer) entry.getValue(); - }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { - numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); - }else if (option.equals("closeAfterRecursion")) { - closeAfterRecursion = (Boolean) entry.getValue(); - } - - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() { - System.out.println("SparqlModul: Collecting Ontology"); - // SparqlOntologyCollector oc= - // new SparqlOntologyCollector(Datastructures.setToArray(instances), - // numberOfRecursions, filterMode, - // Datastructures.setToArray(predList),Datastructures.setToArray( - // objList),Datastructures.setToArray(classList),format,url,useLits); - Manager m = new Manager(); - SpecificSparqlEndpoint sse = null; - SparqlQueryType sqt = null; - // get Options for Manipulator - Manipulator man = new Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); - HashMap<String, String> parameters = new HashMap<String, String>(); - parameters.put("default-graph-uri", "http://dbpedia.org"); - parameters.put("format", "application/sparql-results.xml"); - - // get Options for endpoints - if (predefinedEndpoint >= 1) { - sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); - } else { - sse = new SpecificSparqlEndpoint(url, host, parameters); - } - - // get Options for Filters - - if (predefinedFilter >= 1) { - sqt = PredefinedFilter.getFilter(predefinedFilter); - - } else { - sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); - - } - // give everything to the manager - m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); - try { - String ont = ""; - //System.out.println(learnDomain); - // used to learn a domain of a role - if (learnDomain || learnRange) { - Set<String> pos=new HashSet<String>(); - Set<String> neg=new HashSet<String>(); - if(learnDomain){ - pos = m.getDomainInstancesForRole(role); - neg = m.getRangeInstancesForRole(role); - }else if(learnRange){ - neg = m.getDomainInstancesForRole(role); - pos = m.getRangeInstancesForRole(role); - } - //choose 30 - - - Set<String> tmp=new HashSet<String>(); - for(String one:pos){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - pos=tmp; - System.out.println("Instances used: "+pos.size()); - - tmp=new HashSet<String>(); - for(String one:neg){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - neg=tmp; - - instances=new HashSet<String>(); - instances.addAll(pos); - - instances.addAll(neg); - - for(String one:pos){ - System.out.println("+\""+one+"\""); - } - for(String one:neg){ - 
System.out.println("-\""+one+"\""); - } - - /*Random r= new Random(); - - - Object[] arr=instances.toArray(); - while(instances.size()>=30){ - - }*/ - // add the role to the filter(a solution is always EXISTS - // role.TOP) - m.addPredicateFilter(role); - //System.out.println(instances); - // THIS is a workaround - - } - // the actual extraction is started here - ont = m.extract(instances); - System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); - System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); - - System.out.println("Finished collecting Fragment"); - - if (dumpToFile) { - String filename = System.currentTimeMillis() + ".nt"; - String basedir = "cache" + File.separator; - try { - if (!new File(basedir).exists()) - new File(basedir).mkdir(); - - FileWriter fw = new FileWriter(new File(basedir + filename), true); - fw.write(ont); - fw.flush(); - fw.close(); - - dumpFile = (new File(basedir + filename)).toURI().toURL(); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (format.equals("KB")) { - try { - //kb = KBParser.parseKBFile(new StringReader(ont)); - kb=KBParser.parseKBFile(dumpFile); - } catch (Exception e) { - e.printStackTrace(); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - System.out.println("SparqlModul: ****Finished"); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#toDIG() - */ - @Override - public String toDIG(URI kbURI) { - if (format.equals("N-TRIPLES")) - return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); - else - return DIGConverter.getDIGString(kb, kbURI).toString(); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#export(java.io.File, - * org.dllearner.core.OntologyFormat) - */ - @Override - public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { - // currently no export functions implemented, so we just throw an - // exception - throw new OntologyFormatUnsupportedException("export", format); - } - - public URL getURL() { - return url; - } - - public String[] getOntArray() { - return ontArray; - } - - public void calculateSubjects(String label, int limit) { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - subjects = oc.getSubjectsFromLabel(label, limit); - } catch (IOException e) { - subjects = new String[1]; - subjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateTriples(String subject) { - System.out.println("SparqlModul: Collecting Triples"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - triples = oc.collectTriples(subject); - } catch (IOException e) { - triples = new String[1]; - triples[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateConceptSubjects(String concept) { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - conceptSubjects = oc.getSubjectsFromConcept(concept); - } catch (IOException e) { - conceptSubjects = new String[1]; - conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public boolean subjectThreadIsRunning() { - return 
subjectThreadRunning; - } - - public void setSubjectThreadRunning(boolean bool) { - subjectThreadRunning = bool; - } - - public boolean triplesThreadIsRunning() { - return triplesThreadRunning; - } - - public void setTriplesThreadRunning(boolean bool) { - triplesThreadRunning = bool; - } - - public boolean conceptThreadIsRunning() { - return conceptThreadRunning; - } - - public void setConceptThreadRunning(boolean bool) { - conceptThreadRunning = bool; - } - - public String[] getSubjects() { - return subjects; - } - - public Thread getSubjectThread() { - return subjectThread; - } - - public void setSubjectThread(Thread subjectThread) { - this.subjectThread = subjectThread; - } - - public Thread getTriplesThread() { - return triplesThread; - } - - public void setTriplesThread(Thread triplesThread) { - this.triplesThread = triplesThread; - } - - public Thread getConceptThread() { - return conceptThread; - } - - public void setConceptThread(Thread conceptThread) { - this.conceptThread = conceptThread; - } - - public String[] getTriples() { - return triples; - } - - public String[] getConceptSubjects() { - return conceptSubjects; - } -} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java (from rev 382, trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 08:14:37 UTC (rev 384) @@ -0,0 +1,503 @@ +/** + * Copyright (C) 2007, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.kb.sparql; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Set; + +import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.OntologyFormat; +import org.dllearner.core.OntologyFormatUnsupportedException; +import org.dllearner.core.config.BooleanConfigOption; +import org.dllearner.core.config.ConfigEntry; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.config.IntegerConfigOption; +import org.dllearner.core.config.InvalidConfigOptionValueException; +import org.dllearner.core.config.StringConfigOption; +import org.dllearner.core.config.StringSetConfigOption; +import org.dllearner.core.config.StringTupleListConfigOption; +import org.dllearner.core.dl.KB; +import org.dllearner.parser.KBParser; +import org.dllearner.reasoning.DIGConverter; +import org.dllearner.reasoning.JenaOWLDIGConverter; +import org.dllearner.utilities.StringTuple; + +/** + * Represents a SPARQL Endpoint. 
+ * + * @author Jens Lehmann + * @author Sebastian Knappe + * @author Sebastian Hellmann + */ +public class SparqlKnowledgeSource extends KnowledgeSource { + + // ConfigOptions + private URL url; + String host; + private Set<String> instances=new HashSet<String>();; + private URL dumpFile; + private int recursionDepth = 1; + private int predefinedFilter = 0; + private int predefinedEndpoint = 0; + private Set<String> predList=new HashSet<String>(); + private Set<String> objList=new HashSet<String>(); + // private Set<String> classList; + private String format = "N-TRIPLES"; + private boolean dumpToFile = true; + private boolean useLits = false; + private boolean getAllSuperClasses = true; + private boolean closeAfterRecursion = true; + private int breakSuperClassRetrievalAfter = 200; + + private boolean learnDomain = false; + private boolean learnRange = false; + private int numberOfInstancesUsedForRoleLearning=40; + private String role=""; + private String blankNodeIdentifier = "bnode"; + + LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); + + /** + * Holds the results of the calculateSubjects method + */ + private String[] subjects; + + /** + * Holds the results of the calculateTriples method + */ + private String[] triples; + + /** + * Holds the results of the calculateConceptSubjects method + */ + private String[] conceptSubjects; + + /** + * if a method is running this becomes true + */ + private boolean subjectThreadRunning = false; + + private boolean triplesThreadRunning = false; + + private boolean conceptThreadRunning = false; + + /** + * the Thread that is running a method + */ + private Thread subjectThread; + + private Thread triplesThread; + + private Thread conceptThread; + + // received ontology as array, used if format=Array(an element of the + // array consists of the subject, predicate and object separated by '<' + private String[] ontArray; + + // received ontology as KB, the internal format + private KB kb; + + public static String getName() { + return "SPARQL Endpoint Restructured"; + } + + /** + * sets the ConfigOptions for this KnowledgeSource + * + * @return + */ + public static Collection<ConfigOption<?>> createConfigOptions() { + Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); + options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); + options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); + options.add(new StringSetConfigOption("instances", + "relevant instances e.g. 
positive and negative examples in a learning problem")); + options.add(new IntegerConfigOption("recursionDepth", + "recursion depth of KB fragment selection", 2)); + options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); + options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); + + options.add(new StringSetConfigOption("predList", "list of all ignored roles")); + options.add(new StringSetConfigOption("objList", "list of all ignored objects")); + options.add(new StringSetConfigOption("classList", "list of all ignored classes")); + options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); + options.add(new BooleanConfigOption("dumpToFile", + "Specifies whether the extracted ontology is written to a file or not.", true)); + options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); + options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); + + options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); + options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); + options.add(new StringConfigOption("role", "role to learn Domain/Range from")); + options.add(new StringConfigOption("blankNodeIdentifier", + "used to identify blanknodes in Tripels")); + + options.add(new StringTupleListConfigOption("example", "example")); + options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); + options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); + options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); + options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); + options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); + + + + return options; + } + + /* + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) + */ + @Override + @SuppressWarnings( { "unchecked" }) + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + String option = entry.getOptionName(); + if (option.equals("url")) { + String s = (String) entry.getValue(); + try { + url = new URL(s); + } catch (MalformedURLException e) { + throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), + "malformed URL " + s); + } + } else if (option.equals("host")) { + host = (String) entry.getValue(); + } else if (option.equals("instances")) { + instances = (Set<String>) entry.getValue(); + } else if (option.equals("recursionDepth")) { + recursionDepth = (Integer) entry.getValue(); + } else if (option.equals("predList")) { + predList = (Set<String>) entry.getValue(); + } else if (option.equals("objList")) { + objList = (Set<String>) entry.getValue(); + //} else if (option.equals("classList")) { + // classList = (Set<String>) entry.getValue(); + } else if (option.equals("predefinedEndpoint")) { + predefinedEndpoint = (Integer) entry.getValue(); + } else if (option.equals("predefinedFilter")) { + predefinedFilter = (Integer) entry.getValue(); + } else if (option.equals("format")) { + format = (String) entry.getValue(); + } else if (option.equals("dumpToFile")) { + dumpToFile = (Boolean) entry.getValue(); + } else if (option.equals("useLits")) { + useLits = (Boolean) 
entry.getValue(); + } else if (option.equals("getAllSuperClasses")) { + getAllSuperClasses = (Boolean) entry.getValue(); + } else if (option.equals("learnDomain")) { + learnDomain = (Boolean) entry.getValue(); + }else if (option.equals("learnRange")) { + learnRange = (Boolean) entry.getValue(); + } else if (option.equals("role")) { + role = (String) entry.getValue(); + } else if (option.equals("blankNodeIdentifier")) { + blankNodeIdentifier = (String) entry.getValue(); + } else if (option.equals("example")) { + //System.out.println(entry.getValue()); + }else if (option.equals("replacePredicate")) { + replacePredicate = (LinkedList)entry.getValue(); + }else if (option.equals("replaceObject")) { + replaceObject = (LinkedList)entry.getValue(); + }else if (option.equals("breakSuperClassRetrievalAfter")) { + breakSuperClassRetrievalAfter = (Integer) entry.getValue(); + }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { + numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); + }else if (option.equals("closeAfterRecursion")) { + closeAfterRecursion = (Boolean) entry.getValue(); + } + + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() { + System.out.println("SparqlModul: Collecting Ontology"); + // SparqlOntologyCollector oc= + // new SparqlOntologyCollector(Datastructures.setToArray(instances), + // numberOfRecursions, filterMode, + // Datastructures.setToArray(predList),Datastructures.setToArray( + // objList),Datastructures.setToArray(classList),format,url,useLits); + Manager m = new Manager(); + SpecificSparqlEndpoint sse = null; + SparqlQueryType sqt = null; + // get Options for Manipulator + Manipulator man = new Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); + HashMap<String, String> parameters = new HashMap<String, String>(); + parameters.put("default-graph-uri", "http://dbpedia.org"); + parameters.put("format", "application/sparql-results.xml"); + + // get Options for endpoints + if (predefinedEndpoint >= 1) { + sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); + } else { + sse = new SpecificSparqlEndpoint(url, host, parameters); + } + + // get Options for Filters + + if (predefinedFilter >= 1) { + sqt = PredefinedFilter.getFilter(predefinedFilter); + + } else { + sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); + + } + // give everything to the manager + m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); + try { + String ont = ""; + //System.out.println(learnDomain); + // used to learn a domain of a role + if (learnDomain || learnRange) { + Set<String> pos=new HashSet<String>(); + Set<String> neg=new HashSet<String>(); + if(learnDomain){ + pos = m.getDomainInstancesForRole(role); + neg = m.getRangeInstancesForRole(role); + }else if(learnRange){ + neg = m.getDomainInstancesForRole(role); + pos = m.getRangeInstancesForRole(role); + } + //choose 30 + + + Set<String> tmp=new HashSet<String>(); + for(String one:pos){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + pos=tmp; + System.out.println("Instances used: "+pos.size()); + + tmp=new HashSet<String>(); + for(String one:neg){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + neg=tmp; + + instances=new HashSet<String>(); + instances.addAll(pos); + + instances.addAll(neg); + + for(String one:pos){ + System.out.println("+\""+one+"\""); + } + for(String one:neg){ + 
System.out.println("-\""+one+"\""); + } + + /*Random r= new Random(); + + + Object[] arr=instances.toArray(); + while(instances.size()>=30){ + + }*/ + // add the role to the filter(a solution is always EXISTS + // role.TOP) + m.addPredicateFilter(role); + //System.out.println(instances); + // THIS is a workaround + + } + // the actual extraction is started here + ont = m.extract(instances); + System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); + System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); + + System.out.println("Finished collecting Fragment"); + + if (dumpToFile) { + String filename = System.currentTimeMillis() + ".nt"; + String basedir = "cache" + File.separator; + try { + if (!new File(basedir).exists()) + new File(basedir).mkdir(); + + FileWriter fw = new FileWriter(new File(basedir + filename), true); + fw.write(ont); + fw.flush(); + fw.close(); + + dumpFile = (new File(basedir + filename)).toURI().toURL(); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (format.equals("KB")) { + try { + //kb = KBParser.parseKBFile(new StringReader(ont)); + kb=KBParser.parseKBFile(dumpFile); + } catch (Exception e) { + e.printStackTrace(); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + System.out.println("SparqlModul: ****Finished"); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#toDIG() + */ + @Override + public String toDIG(URI kbURI) { + if (format.equals("N-TRIPLES")) + return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); + else + return DIGConverter.getDIGString(kb, kbURI).toString(); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#export(java.io.File, + * org.dllearner.core.OntologyFormat) + */ + @Override + public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { + // currently no export functions implemented, so we just throw an + // exception + throw new OntologyFormatUnsupportedException("export", format); + } + + public URL getURL() { + return url; + } + + public String[] getOntArray() { + return ontArray; + } + + public void calculateSubjects(String label, int limit) { + System.out.println("SparqlModul: Collecting Subjects"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + subjects = oc.getSubjectsFromLabel(label, limit); + } catch (IOException e) { + subjects = new String[1]; + subjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public void calculateTriples(String subject) { + System.out.println("SparqlModul: Collecting Triples"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + triples = oc.collectTriples(subject); + } catch (IOException e) { + triples = new String[1]; + triples[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public void calculateConceptSubjects(String concept) { + System.out.println("SparqlModul: Collecting Subjects"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + conceptSubjects = oc.getSubjectsFromConcept(concept); + } catch (IOException e) { + conceptSubjects = new String[1]; + conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public boolean subjectThreadIsRunning() { + return 
subjectThreadRunning; + } + + public void setSubjectThreadRunning(boolean bool) { + subjectThreadRunning = bool; + } + + public boolean triplesThreadIsRunning() { + return triplesThreadRunning; + } + + public void setTriplesThreadRunning(boolean bool) { + triplesThreadRunning = bool; + } + + public boolean conceptThreadIsRunning() { + return conceptThreadRunning; + } + + public void setConceptThreadRunning(boolean bool) { + conceptThreadRunning = bool; + } + + public String[] getSubjects() { + return subjects; + } + + public Thread getSubjectThread() { + return subjectThread; + } + + public void setSubjectThread(Thread subjectThread) { + this.subjectThread = subjectThread; + } + + public Thread getTriplesThread() { + return triplesThread; + } + + public void setTriplesThread(Thread triplesThread) { + this.triplesThread = triplesThread; + } + + public Thread getConceptThread() { + return conceptThread; + } + + public void setConceptThread(Thread conceptThread) { + this.conceptThread = conceptThread; + } + + public String[] getTriples() { + return triples; + } + + public String[] getConceptSubjects() { + return conceptSubjects; + } +}
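The Cache class documented in r384 above keeps one file per resource (the URL-encoded subject serves as the file name) and, inside that file, a timestamp plus a map from SPARQL query to XML result; entries older than 15 days are treated as stale. A minimal usage sketch, assuming the pre-r385 package org.dllearner.kb.sparql and using a placeholder resource, query string and endpoint call (the constructor and the get/put signatures are taken from the diffs; queryEndpoint() is a stand-in for the real HTTP request):

import java.io.File;

import org.dllearner.kb.sparql.Cache;

// Sketch only: Cache(String), get(String,String) and put(String,String,String)
// are the methods shown in the diffs; queryEndpoint() is a placeholder for the
// actual request against the SPARQL endpoint.
public class CacheUsageSketch {

	public static void main(String[] args) {
		// one cache directory, created on first use
		Cache cache = new Cache("cache" + File.separator + "sketch");

		// the resource is the file key (URL-encoded into "<resource>.cache"),
		// the query is the key inside the per-resource hash map
		String resource = "http://dbpedia.org/resource/Angela_Merkel";
		String query = "SELECT ?p ?o WHERE { <" + resource + "> ?p ?o } LIMIT 10";

		String xml = cache.get(resource, query); // null if missing or older than 15 days
		if (xml == null) {
			xml = queryEndpoint(query);
			cache.put(resource, query, xml);
		}
		System.out.println(xml);
	}

	private static String queryEndpoint(String query) {
		return "<sparql><!-- result XML from the endpoint --></sparql>";
	}
}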
From: <ku...@us...> - 2008-01-18 08:43:29
|
Revision: 385 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=385&view=rev Author: kurzum Date: 2008-01-18 00:43:11 -0800 (Fri, 18 Jan 2008) Log Message: ----------- changed package structure Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedFilter.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SpecificSparqlEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/PropertyNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/ trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlCache.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlFilter.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/ trunk/src/dl-learner/org/dllearner/kb/sparql/query/Cache.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlCache.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlFilter.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlOntologyCollector.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryType.java trunk/src/dl-learner/org/dllearner/kb/sparql/SpecificSparqlEndpoint.java Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,235 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. 
- * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.net.URLEncoder; -import java.util.HashMap; - -/** - * SPARQL query cache to avoid possibly expensive multiple queries. An object of - * this class can be the cache itself or a cache object(one entry), We could - * split that in two classes, but one entry o object only has contains data and - * one additional function and would just be a data class - * - * it writes the files according to one resource in the basedir and saves the - * cache object in it. Filename is the subject, a resource - * e.g. http://dbpedia.org/resource/Angela_Merkel which is first urlencoded - * and so serves as the hash for the filename. - * - * the cache object in the file remembers: a timestamp, - * a hashmap SparqlQuery -> SparqlXMLResult - * Cache validates if timestamp too old and Sparql-Query the same - * before returning the SPARQL xml-result - * - * @author Sebastian Hellmann - * @author Sebastian Knappe - */ -public class Cache implements Serializable { - - /** - * This maps sparql query to sparql result - */ - protected HashMap<String, String> hm; - - final static long serialVersionUID = 104; - transient String basedir = ""; - transient String fileending = ".cache"; - transient boolean debug_print_flag = false; - long timestamp; - /** - * After how many days cache entries get invalid - */ - protected long daysoffreshness = 15; - protected long multiplier = 24 * 60 * 60 * 1000;// h m s ms - - // private HashMap<String, String> inmem_cache; - - // - /** - * constructor for the cache itself - * Called once at the startup - * @param path - * Where the base path to the cache is - */ - public Cache(String path) { - this.basedir = path + File.separator; - if (!new File(path).exists()) { - System.out.println("created directory: " + path + " : " - + new File(path).mkdir()); - - } - } - - - /** - * constructor for single cache object(one entry) - * @param sparqlQuery - * query - * @param content - * that is the sparql query result as xml - */ - protected Cache(String sparqlQuery, String content) { - // this.content = c; - // this.sparqlquery = sparql; - this.timestamp = System.currentTimeMillis(); - this.hm = new HashMap<String, String>(); - hm.put(sparqlQuery, content); - } - - /** - * gets a chached sparqlQuery for a resource(key) and returns the - * sparqlXMLResult or null, if none is found. 
- * - * @param key - * is the resource, the identifier - * @param sparqlQuery - * is a special sparql query about that resource - * @return sparqlXMLResult - */ - public String get(String key, String sparqlQuery) { - // System.out.println("get From "+key); - String ret = null; - try { - Cache c = readFromFile(makeFilename(key)); - if (c == null) - return null; - // System.out.println(" file found"); - if (!c.checkFreshness()) - return null; - // System.out.println("fresh"); - String xml = ""; - try { - xml = c.hm.get(sparqlQuery); - } catch (Exception e) { - return null; - } - return xml; - // System.out.println("valid"); - // ret = c.content; - } catch (Exception e) { - e.printStackTrace(); - } - return ret; - } - - /** - * @param key - * is the resource, the identifier - * @param sparqlquery - * is the query used as another identifier - * @param content - * is the result of the query - */ - public void put(String key, String sparqlquery, String content) { - // System.out.println("put into "+key); - Cache c = readFromFile(makeFilename(key)); - if (c == null) { - c = new Cache(sparqlquery, content); - putIntoFile(makeFilename(key), c); - } else { - c.hm.put(sparqlquery, content); - putIntoFile(makeFilename(key), c); - } - - } - - /** - * this function takes a resource string and then URIencodes it and makes a - * filename out of it for the use in the hashmap - * - * @param key - * @return the complete key for filename in the hashmap - */ - protected String makeFilename(String key) { - String ret = ""; - try { - ret = basedir + URLEncoder.encode(key, "UTF-8") + fileending; - } catch (Exception e) { - e.printStackTrace(); - } - return ret; - } - - public void checkFile(String Filename) { - if (!new File(Filename).exists()) { - try { - new File(Filename).createNewFile(); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - } - - /** - * puts a cache entry in a file - * - * @param Filename - * @param c - */ - protected void putIntoFile(String Filename, Cache c) { - try { - // FileWriter fw=new FileWriter(new File(Filename),true); - FileOutputStream fos = new FileOutputStream(Filename, false); - ObjectOutputStream o = new ObjectOutputStream(fos); - o.writeObject(c); - fos.flush(); - fos.close(); - } catch (Exception e) { - System.out.println("Not in cache creating: " + Filename); - } - } - - /** - * reads a cache entry from a file - * - * @param Filename - * @return cache entry - */ - protected Cache readFromFile(String Filename) { - Cache content = null; - try { - FileInputStream fos = new FileInputStream(Filename); - ObjectInputStream o = new ObjectInputStream(fos); - content = (Cache) o.readObject(); - // FileReader fr=new FileReader(new File(Filename,"r")); - // BufferedReader br=new BufferedReader(fr); - } catch (Exception e) { - } - return content; - - } - - protected boolean checkFreshness() { - if ((System.currentTimeMillis() - this.timestamp) <= (daysoffreshness * multiplier)) - // fresh - return true; - else - return false; - } - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,113 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. 
- * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.net.URI; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; -import java.util.Vector; - -import org.dllearner.utilities.StringTuple; - -/** - * Is a node in the graph that is a class. - * - * @author Sebastian Hellmann - */ -public class ClassNode extends Node { - Set<PropertyNode> properties = new HashSet<PropertyNode>(); - - public ClassNode(URI u) { - super(u); - this.type = "class"; - } - - //expands all directly connected nodes - @Override - public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { - Set<StringTuple> s = tsq.query(this.uri); - // see manipulator - s = m.check(s, this); - Vector<Node> Nodes = new Vector<Node>(); - - - Iterator<StringTuple> it = s.iterator(); - while (it.hasNext()) { - StringTuple t = (StringTuple) it.next(); - try { - // substitute rdf:type with owl:subclassof - if (t.a.equals(m.type) || t.a.equals(m.subclass)) { - ClassNode tmp = new ClassNode(new URI(t.b)); - properties.add(new PropertyNode(new URI(m.subclass), this, tmp)); - Nodes.add(tmp); - } else { - // further expansion stops here - // Nodes.add(tmp); is missing on purpose - ClassNode tmp = new ClassNode(new URI(t.b)); - properties.add(new PropertyNode(new URI(t.a), this, tmp)); - // System.out.println(m.blankNodeIdentifier); - // System.out.println("XXXXX"+t.b); - - // if o is a blank node expand further - if (t.b.startsWith(m.blankNodeIdentifier)) { - tmp.expand(tsq, m); - System.out.println(m.blankNodeIdentifier); - System.out.println("XXXXX" + t.b); - } - // Nodes.add(tmp); - } - } catch (Exception e) { - System.out.println(t); - e.printStackTrace(); - } - - } - return Nodes; - } - - // gets the types for properties recursively - @Override - public Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { - // TODO return type doesn't make sense - return new Vector<Node>(); - } - - @Override - public Set<String> toNTriple() { - Set<String> s = new HashSet<String>(); - s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + "http://www.w3.org/2002/07/owl#Class" + ">."); - - for (PropertyNode one : properties) { - s.add("<" + this.uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); - s.addAll(one.getB().toNTriple()); - } - - return s; - } - - @Override - public int compareTo(Node n){ - return super.compareTo(n); - // - } - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,94 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. 
- * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -/** - * Stores all configuration settings. - * - * @author Sebastian Hellmann - */ -public class Configuration { - - /* - * this class colects all configuration information - * see the other classes, which are used as attributes here - * */ - - private SpecificSparqlEndpoint specificSparqlEndpoint; - private SparqlQueryType sparqlQueryType; - private Manipulator manipulator; - // the following needs to be moved to - // class extraction algorithm or manipulator - private int recursiondepth = 2; - private boolean getAllSuperClasses = true; - private boolean closeAfterRecursion = true; - public int numberOfUncachedSparqlQueries=0; - public int numberOfCachedSparqlQueries=0; - - public Configuration(SpecificSparqlEndpoint specificSparqlEndpoint, - SparqlQueryType sparqlQueryType, Manipulator manipulator, int recursiondepth, - boolean getAllSuperClasses, boolean closeAfterRecursion) { - this.specificSparqlEndpoint = specificSparqlEndpoint; - this.sparqlQueryType = sparqlQueryType; - this.manipulator = manipulator; - this.recursiondepth = recursiondepth; - this.getAllSuperClasses = getAllSuperClasses; - this.closeAfterRecursion=closeAfterRecursion; - - } - - public Configuration changeQueryType(SparqlQueryType sqt) { - // TODO must clone here - return new Configuration(this.specificSparqlEndpoint, sqt, this.manipulator, - this.recursiondepth, this.getAllSuperClasses,this.closeAfterRecursion); - - } - - public Manipulator getManipulator() { - return this.manipulator; - } - - public SpecificSparqlEndpoint getSparqlEndpoint() { - return specificSparqlEndpoint; - } - - public SparqlQueryType getSparqlQueryType() { - return sparqlQueryType; - } - - public boolean isGetAllSuperClasses() { - return getAllSuperClasses; - } - public boolean isCloseAfterRecursion() { - return closeAfterRecursion; - } - - public int getRecursiondepth() { - return recursiondepth; - } - - public void increaseNumberOfuncachedSparqlQueries(){ - numberOfUncachedSparqlQueries++; - } - public void increaseNumberOfCachedSparqlQueries(){ - numberOfCachedSparqlQueries++; - } - -} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2008-01-18 08:43:11 UTC (rev 385) @@ -23,6 +23,11 @@ import java.util.HashSet; import java.util.Vector; +import org.dllearner.kb.sparql.configuration.Configuration; +import org.dllearner.kb.sparql.datastructure.ClassNode; +import org.dllearner.kb.sparql.datastructure.InstanceNode; +import org.dllearner.kb.sparql.datastructure.Node; + /** * This class is used to extract the information recursively. 
* @@ -127,10 +132,10 @@ //classes.remove(next); Node next = classes.remove(0); - if(!hadAlready.contains(next.uri.toString())){ + if(!hadAlready.contains(next.getURI().toString())){ p("Expanding: " + next); //System.out.println(hadAlready.size()); - hadAlready.add(next.uri.toString()); + hadAlready.add(next.getURI().toString()); tmp=next.expand(tsp, manipulator); classes.addAll(tmp); tmp=new Vector<Node>(); Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,118 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.net.URI; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; -import java.util.Vector; - -import org.dllearner.utilities.StringTuple; - -/** - * A node in the graph that is an instance. 
- * - * @author Sebastian Hellmann - * - */ -public class InstanceNode extends Node { - - Set<ClassNode> classes = new HashSet<ClassNode>(); - Set<StringTuple> datatypes = new HashSet<StringTuple>(); - Set<PropertyNode> properties = new HashSet<PropertyNode>(); - - public InstanceNode(URI u) { - super(u); - this.type = "instance"; - - } - - //expands all directly connected nodes - @Override - public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { - - Set<StringTuple> s = tsq.query(uri); - // see Manipulator - m.check(s, this); - // System.out.println("fffffff"+m); - Vector<Node> Nodes = new Vector<Node>(); - - Iterator<StringTuple> it = s.iterator(); - while (it.hasNext()) { - StringTuple t = (StringTuple) it.next(); - // basically : if p is rdf:type then o is a class - // else it is an instance - try { - if (t.a.equals(m.type)) { - ClassNode tmp = new ClassNode(new URI(t.b)); - classes.add(tmp); - Nodes.add(tmp); - } else { - InstanceNode tmp = new InstanceNode(new URI(t.b)); - properties.add(new PropertyNode(new URI(t.a), this, tmp)); - Nodes.add(tmp); - - } - } catch (Exception e) { - System.out.println("Problem with: " + t); - e.printStackTrace(); - } - - } - expanded = true; - return Nodes; - } - - // gets the types for properties recursively - @Override - public Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { - for (PropertyNode one : properties) { - one.expandProperties(tsq, m); - } - return new Vector<Node>(); - } - - @Override - public Set<String> toNTriple() { - Set<String> s = new HashSet<String>(); - s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + "http://www.w3.org/2002/07/owl#Thing" + ">."); - for (ClassNode one : classes) { - s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + one.getURI() + ">."); - s.addAll(one.toNTriple()); - } - for (PropertyNode one : properties) { - s.add("<" + uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); - s.addAll(one.toNTriple()); - s.addAll(one.getB().toNTriple()); - } - - return s; - } - - @Override - public int compareTo(Node n){ - return super.compareTo(n); - // - } - -} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-18 08:43:11 UTC (rev 385) @@ -25,6 +25,10 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.dllearner.kb.sparql.configuration.Configuration; +import org.dllearner.kb.sparql.configuration.SparqlQueryType; +import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.datastructure.Node; import org.dllearner.utilities.StringTuple; /** Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2008-01-18 08:43:11 UTC (rev 385) @@ -24,6 +24,9 @@ import java.util.LinkedList; import java.util.Set; +import org.dllearner.kb.sparql.datastructure.ClassNode; +import org.dllearner.kb.sparql.datastructure.InstanceNode; +import org.dllearner.kb.sparql.datastructure.Node; import org.dllearner.utilities.StringTuple; /** Deleted: 
trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,64 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.net.URI; -import java.util.Set; -import java.util.Vector; - -/** - * Abstract class. - * - * @author Sebastian Hellmann - * - */ -public abstract class Node implements Comparable<Node> { - URI uri; - protected String type; - protected boolean expanded = false; - - public Node(URI u) { - this.uri = u; - } - - public abstract Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m); - - public abstract Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m); - - public abstract Set<String> toNTriple(); - - @Override - public String toString() { - return "Node: " + uri + ":" + type; - - } - - public URI getURI() { - return uri; - } - public boolean equals(Node n){ - if(this.uri.equals(n.uri))return true; - else return false; - } - public int compareTo(Node n){ - return this.uri.toString().compareTo(n.uri.toString()); - } - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedEndpoint.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,167 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.net.URL; -import java.util.HashMap; - -/** - * Holds some predefined endpoints. 
- * - * @author Sebastian Hellmann - * - */ -public class PredefinedEndpoint { - public static SpecificSparqlEndpoint getEndpoint(int i) { - - switch (i) { - case 1: - return dbpediaEndpoint(); - case 2: - return localJoseki(); - case 3: - return govTrack(); - case 4: - return revyu(); - case 5: - return myopenlink(); - case 6: - return worldFactBook(); - } - return null; - } - - public static SpecificSparqlEndpoint dbpediaEndpoint() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - m.put("default-graph-uri", "http://dbpedia.org"); - m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://dbpedia.openlinksw.com:8890/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "dbpedia.openlinksw.com", m); - } - - public static SpecificSparqlEndpoint localJoseki() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://localhost:2020/books"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "localhost", m); - } - public static SpecificSparqlEndpoint worldFactBook() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://www4.wiwiss.fu-berlin.de/factbook/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); - } - - /* - * it only has 4 classes - public static SpecificSparqlEndpoint dblp() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://www4.wiwiss.fu-berlin.de/dblp/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); - } - */ - public static SpecificSparqlEndpoint govTrack() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://www.rdfabout.com/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "www.rdfabout.com", m); - } - public static SpecificSparqlEndpoint revyu() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - //http://revyu.com/sparql?query=SELECT DISTINCT * WHERE {[] a ?c} - try { - u = new URL("http://revyu.com/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "revyu.com", m); - } - - // returns strange xml - /*public static SpecificSparqlEndpoint dbtune() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - //http://dbtune.org:2020/sparql/?query=SELECT DISTINCT * WHERE {[] a ?c}Limit 10 - http://dbtune.org:2020/evaluateQuery?repository=default&serialization=rdfxml&queryLanguage=SPARQL&query=SELECT+DISTINCT+*+WHERE+%7B%5B%5D+a+%3Fc%7D - &resultFormat=xml - 
&resourceFormat=ns&entailment=none - http://dbtune.org:2020/evaluateQuery - ?repository=default&serialization=rdfxml&queryLanguage=SPARQL - &query=SELECT+DISTINCT+*+WHERE+%7B%5B%5D+a+%3Fc%7D - &resultFormat=xml - &resourceFormat=ns&entailment=none - try { - u = new URL("http://dbtune.org:2020/sparql/"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "dbtune.org", m); - }*/ - - public static SpecificSparqlEndpoint myopenlink() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - m.put("default-graph-uri", "http://myopenlink.net/dataspace"); - m.put("format", "application/sparql-results.xml"); - //http://myopenlink.net:8890/sparql/?query=select+distinct+%3FConcept+where+%7B%5B%5D+a+%3FConcept%7D - try { - u = new URL("http://myopenlink.net:8890/sparql/"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "myopenlink.net", m); - } - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PredefinedFilter.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,170 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.util.HashSet; -import java.util.Set; - -/** - * Predefined filters. 
- * - * @author Sebastian Hellmann - * - */ -public class PredefinedFilter { - - - public static SparqlQueryType getFilter(int i) { - - switch (i) { - case 1: - return YagoFilter(); - case 2: - return SKOS(); - case 3: - return YAGOSKOS(); - case 4: - return YagoSpecialHierarchy(); - } - return null; - } - - - public static SparqlQueryType YagoFilter(){ - Set<String> pred = new HashSet<String>(); - pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - pred.add("http://dbpedia.org/property/relatedInstance"); - - Set<String> obj = new HashSet<String>(); - //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - //obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://dbpedia.org/resource/Category:"); - obj.add("http://dbpedia.org/resource/Template"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - obj.add("http://www.w3.org/2004/02/skos/core"); - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - public static SparqlQueryType YagoSpecialHierarchy(){ - Set<String> pred = new HashSet<String>(); - pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - pred.add("http://dbpedia.org/property/relatedInstance"); - pred.add("http://dbpedia.org/property/monarch"); - - - Set<String> obj = new HashSet<String>(); - obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://dbpedia.org/resource/Template"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - obj.add("http://www.w3.org/2004/02/skos/core"); - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - - - public static SparqlQueryType SKOS(){ - Set<String> pred = new HashSet<String>(); - //pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - pred.add("http://www.w3.org/2004/02/skos/core#narrower"); - pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - - Set<String> obj = new HashSet<String>(); - //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - //obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - 
obj.add("http://upload.wikimedia.org/wikipedia"); - - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - - - obj.add("http://dbpedia.org/class/yago"); - obj.add("http://dbpedia.org/resource/Template"); - - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - public static SparqlQueryType YAGOSKOS(){ - Set<String> pred = new HashSet<String>(); - //pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - //pred.add("http://www.w3.org/2004/02/skos/core#narrower"); - pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - - Set<String> obj = new HashSet<String>(); - //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - //obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - - - //obj.add("http://dbpedia.org/class/yago"); - obj.add("http://dbpedia.org/resource/Template"); - - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - - - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,120 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.net.URI; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; -import java.util.Vector; - -import org.dllearner.utilities.StringTuple; - -/** - * Property node. 
- * - * @author Sebastian Hellmann - * - */ -public class PropertyNode extends Node { - - // the a and b part of a property - private Node a; - private Node b; - // specialtypes like owl:symmetricproperty - private Set<String> specialTypes; - - public PropertyNode(URI u) { - super(u); - this.type = "property"; - - } - - public PropertyNode(URI u, Node a, Node b) { - super(u); - this.type = "property"; - this.a = a; - this.b = b; - this.specialTypes = new HashSet<String>(); - } - - @Override - public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { - Set<StringTuple> s = tsq.query(uri); - Vector<Node> Nodes = new Vector<Node>(); - // Manipulation - - Iterator<StringTuple> it = s.iterator(); - while (it.hasNext()) { - StringTuple t = (StringTuple) it.next(); - try { - if (t.a.equals(m.type)) { - specialTypes.add(t.b); - } - } catch (Exception e) { - System.out.println(t); - e.printStackTrace(); - } - - } - return Nodes; - } - - // gets the types for properties recursively - @Override - public Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { - b.expandProperties(tsq, m); - return this.expand(tsq, m); - } - - public Node getA() { - return a; - } - - public Node getB() { - return b; - } - - @Override - public Set<String> toNTriple() { - Set<String> s = new HashSet<String>(); - s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + "http://www.w3.org/2002/07/owl#ObjectProperty" + ">."); - for (String one : specialTypes) { - s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + one + ">."); - - } - - return s; - } - - @Override - public boolean equals(Node n){ - if(this.uri.equals(n.uri))return true; - else return false; - } - - @Override - public int compareTo(Node n){ - return super.compareTo(n); - } - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlCache.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlCache.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlCache.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,202 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.net.URLEncoder; -/** - * - * This is a primitive cache. 
- * The objects of this class can be either the cache itself or just on entry in the cache - * - * the cache remembers: a timestamp, the original sparql-query, the result - * key is the subject http://dbpedia.org/resource/Angela_Merkel which is first urlencoded - * and so serves as the hash for the filename. - * Cache validates if timestamp too old and Sparql-Query the same - * before returning the SPARQL xml-result - * - * @author Sebastian Hellmann - * @author Sebastian Knappe - */ -public class SparqlCache implements Serializable{ - - final static long serialVersionUID=104; - transient String basedir=""; - transient String fileending=".cache"; - long timestamp; - String content=""; - long daysoffreshness=15; - long multiplier=24*60*60*1000;//h m s ms - String sparqlquery=""; - - - /** - * Constructor for the cache itself. - * Called once at the beginning - * - * @param path Where the base path to the cache is - */ - public SparqlCache(String path){ - this.basedir=path+File.separator; - if(!new File(path).exists()) - {System.out.println(new File(path).mkdir());;} - - } - -// - /** - * Constructor for single cache object(one entry) - * - * @param content the sparql xml result - * @param sparql the sparql query - */ - public SparqlCache(String content, String sparql){ - this.content=content; - this.sparqlquery=sparql; - this.timestamp=System.currentTimeMillis(); - } - - - - /** - * use only on the cache object describing the cache itself - * - * @param key the individual - * @param sparql the sparql query - * @return the cached sparql result or null - */ - public String get(String key, String sparql){ - String ret=null; - try{ - SparqlCache c =readFromFile(makeFilename(key)); - if(c==null)return null; - if(!c.checkFreshness())return null; - if(!c.validate(sparql))return null; - - ret=c.content; - }catch (Exception e) {e.printStackTrace();} - return ret; - } - - /** - * - * constructor for single cache object(one entry) - * - * @param key the individual - * @param content the sparql result - * @param sparql the sparql query - */ - public void put(String key, String content, String sparql){ - SparqlCache c=new SparqlCache(content,sparql); - putIntoFile(makeFilename(key), c); - } - - - /** - * to normalize the filenames - * - * @param key - * @return - */ - String makeFilename(String key){ - String ret=""; - try{ - ret=basedir+URLEncoder.encode(key, "UTF-8")+fileending; - }catch (Exception e) {e.printStackTrace();} - return ret; - } - - /** - * how old is the result - * @return - */ - boolean checkFreshness(){ - if((System.currentTimeMillis()-this.timestamp)<=(daysoffreshness*multiplier)) - //fresh - return true; - else return false; - } - - - /** - * some sparql query - * @param sparql - * @return - */ - boolean validate(String sparql){ - if(this.sparqlquery.equals(sparql)) - //valid - return true; - else return false; - } - - /** - * makes a new file if none exists - * @param Filename - */ - public void checkFile(String Filename){ - if(!new File(Filename).exists()){ - try{ - new File(Filename).createNewFile(); - }catch (Exception e) {e.printStackTrace();} - - } - - } - - /** - * internal saving function - * puts a cache object into a file - * - * @param Filename - * @param content - */ - public void putIntoFile(String Filename,SparqlCache content){ - try{ - FileOutputStream fos = new FileOutputStream( Filename , false ); - ObjectOutputStream o = new ObjectOutputStream( fos ); - o.writeObject( content ); - fos.flush(); - fos.close(); - }catch (Exception e) {System.out.println("Not in cache 
creating: "+Filename);} - } - - /** - * internal retrieval function - * - * @param Filename - * @return one entry object - */ - public SparqlCache readFromFile(String Filename){ - SparqlCache content=null; - try{ - FileInputStream fos = new FileInputStream( Filename ); - ObjectInputStream o = new ObjectInputStream( fos ); - content=(SparqlCache)o.readObject(); - }catch (Exception e) {} - return content; - - } -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlFilter.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlFilter.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlFilter.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,119 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -/** - * - * - * encapsulates all the options - * see the documentation for more help - * - * @author Sebastian Hellmann - * @author Sebastian Knappe - * - */ -public class SparqlFilter { - public int mode=0; - // 0 yago, 1 only cat, 2 skos+cat - String[] PredFilter=null; - String[] ObjFilter=null; - public boolean useLiterals=false; - - - String[] yagoPredFilterDefault={ - "http://www.w3.org/2004/02/skos/core", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference" }; - String[] yagoObjFilterDefault={ - "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/resource/Category", - "http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons"}; - - String[] onlyCatPredFilterDefault={ - "http://www.w3.org/2004/02/skos/core", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference" }; - String[] onlyCatObjFilterDefault={ - "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons"}; - - String[] skosPredFilterDefault={ - "http://www.w3.org/2004/02/skos/core#narrower", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference" }; - String[] skosObjFilterDefault={ - "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons"}; - - 
public SparqlFilter(int mode, String[] pred, String[] obj) { - if (mode==-1 && (pred==null || obj==null)) - {mode=0;} - this.mode=mode; - - switch (mode){ - case 0: //yago - ObjFilter=yagoObjFilterDefault; - PredFilter=yagoPredFilterDefault; - break; - case 1: // only Categories - ObjFilter=onlyCatObjFilterDefault; - PredFilter=onlyCatPredFilterDefault; - break; - case 2: // there are some other changes to, which are made directly in other functions - ObjFilter=skosObjFilterDefault; - PredFilter=skosPredFilterDefault; - break; - default: - ObjFilter=obj; - PredFilter=pred; - break; - } - } - - public SparqlFilter(int mode, String[] pred, String[] obj,boolean uselits) throws Exception{ - this(mode,pred,obj); - this.useLiterals=uselits; - } - - public String[] getObjFilter(){ - return this.ObjFilter; - } - - public String[] getPredFilter(){ - return this.PredFilter; - } -} \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 08:43:11 UTC (rev 385) @@ -43,6 +43,11 @@ import org.dllearner.core.config.StringSetConfigOption; import org.dllearner.core.config.StringTupleListConfigOption; import org.dllearner.core.dl.KB; +import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; +import org.dllearner.kb.sparql.configuration.PredefinedFilter; +import org.dllearner.kb.sparql.configuration.SparqlQueryType; +import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.old.SparqlOntologyCollector; import org.dllearner.parser.KBParser; import org.dllearner.reasoning.DIGConverter; import org.dllearner.reasoning.JenaOWLDIGConverter; Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlOntologyCollector.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlOntologyCollector.java 2008-01-18 08:14:37 UTC (rev 384) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlOntologyCollector.java 2008-01-18 08:43:11 UTC (rev 385) @@ -1,511 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- * - */ -package org.dllearner.kb.sparql; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.net.HttpURLConnection; -import java.net.URL; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Vector; - -import org.dllearner.kb.sparql.SparqlQueryMaker; - - -/** - * This class collects the ontology from dbpedia, - * everything is saved in hashsets, so the doublettes are taken care of - * - * - * @author Sebastian Hellmann - * @author Sebastian Knappe - * - */ -public class SparqlOntologyCollector { - - boolean print_flag=false; - SparqlQueryMaker queryMaker; - SparqlCache cache; - URL url; - SparqlFilter sf; - String[] subjectList; - int numberOfRecursions; - HashSet<String> properties; - HashSet<String> classes; - HashSet<String> instances; - HashSet<String> triples; - String format; - - // some namespaces - String subclass="http://www.w3.org/2000/01/rdf-schema#subClassOf"; - String type="http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; - String objectProperty="http://www.w3.org/2002/07/owl#ObjectProperty"; - String classns="http://www.w3.org/2002/07/owl#Class"; - String thing="http://www.w3.org/2002/07/owl#Thing"; - - - String[] defaultClasses={ - "http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Category:", - "http://dbpedia.org/resource/Template:", - "http://www.w3.org/2004/02/skos/core", - "http://dbpedia.org/class/"}; - - - /** - * - * - * @param subjectList - * @param numberOfRecursions - * @param filterMode - * @param FilterPredList - * @param FilterObjList - * @param defClasses - */ - public SparqlOntologyCollector(String[] subjectList,int numberOfRecursions, - int filterMode, String[] FilterPredList,String[] FilterObjList,String[] defClasses, String format, URL url, boolean useLits){ - this.subjectList=subjectList; - this.numberOfRecursions=numberOfRecursions; - this.format=format; - //this.queryMaker=new SparqlQueryMaker(); - this.cache=new SparqlCache("cache"); - if(defClasses!=null && defClasses.length>0 ){ - this.defaultClasses=defClasses; - } - try{ - this.sf=new SparqlFilter(filterMode,FilterPredList,FilterObjList,useLits); - this.url=url; - this.properties=new HashSet<String>(); - this.classes=new HashSet<String>(); - this.instances=new HashSet<String>(); - this.triples=new HashSet<String>(); - }catch (Exception e) {e.printStackTrace();} - - } - - public SparqlOntologyCollector(URL url) - { - // this.queryMaker=new SparqlQueryMaker(); - this.cache=new SparqlCache("cache"); - this.url=url; - } - - /** - * first collects the ontology - * then types everything so it becomes owl-dl - * - * @return all triples in n-triple format - */ - public String collectOntology() throws IOException{ - getRecursiveList(subjectList, numberOfRecursions); - finalize(); - String ret=""; - for (Iterator<String> iter = triples.iterator(); iter.hasNext();) { - ret += iter.next(); - } - return ret; - } - - public String[] collectTriples(String subject) throws IOException{ - System.out.println("Searching for Article: "+subject); - String sparql=SparqlQueryMaker.makeArticleQuery(subject); - String fromCache=cache.get(subject, sparql); - String xml; - // if not in cache get it from dbpedia - if(fromCache==null){ - xml=sendAndReceive(sparql); - cache.put(subject, xml, sparql); - System.out.print("\n"); - } - else{ - xml=fromCache; - System.out.println("FROM 
CACHE"); - } - - return processArticle(xml); - } - - public String[] processArticle(String xml) - { - Vector<String> vec=new Vector<String>(); - String one="<binding name=\"predicate\"><uri>"; - String two="<binding name=\"object\">"; - String end="</uri></binding>"; - String predtmp=""; - String objtmp=""; - // ArrayList<String> al=new ArrayList<String>(); - while(xml.indexOf(one)!=-1){ - //get pred - xml=xml.substring(xml.indexOf(one)+one.length()); - predtmp=xml.substring(0,xml.indexOf(end)); - //getobj - xml=xml.substring(xml.indexOf(two)+two.length()); - if (xml.startsWith("<literal xml:lang=\"en\">")){ - xml=xml.substring(xml.indexOf(">")+1); - objtmp=xml.substring(0,xml.indexOf("</literal>")); - } - else if (xml.startsWith("<uri>")) objtmp=xml.substring(5,xml.indexOf(end)); - else continue; - - System.out.println("Pred: "+predtmp+" Obj: "+objtmp); - - vec.add(predtmp+"<"+objtmp); - } - - String[] ret=new String[vec.size()]; - return vec.toArray(ret); - } - - public String[] getSubjectsFromLabel(String label, int limit) throws IOException{ - System.out.println("Searching for Label: "+label); - String sparql=SparqlQueryMaker.makeLabelQuery(label,limit); - String FromCache=cache.get(label, sparql); - String xml; - // if not in cache get it from dbpedia - if(FromCache==null){ - xml=sendAndReceive(sparql); - cache.put(label, xml, sparql); - System.out.print("\n"); - } - else{ - xml=FromCache; - System.out.println("FROM CACHE"); - } - - return processSubjects(xml); - } - - public String[] getSubjectsFromConcept(String concept) throws IOException - { - System.out.println("Searching for Subjects of type: "+concept); - String sparql=SparqlQueryMaker.makeConceptQuery(concept); - String FromCache=cache.get(concept, sparql); - String xml; - // if not in cache get it from dbpedia - if(FromCache==null){ - xml=sendAndReceive(sparql); - cache.put(concept, xml, sparql); - System.out.print("\n"); - } - else{ - xml=FromCache; - System.out.println("FROM CACHE"); - } - - return processSubjects(xml); - } - - /** - * calls getRecursive for each subject in list - * @param subjects - * @param NumberofRecursions - */ - public void getRecursiveList(String[] subjects,int NumberofRecursions) throws IOException{ - for (int i = 0; i < subjects.length; i++) { - getRecursive(subjects[i], NumberofRecursions); - } - } - - /** - * gets all triples until numberofrecursion-- gets 0 - * - * @param StartingSubject - * @param NumberofRecursions - */ - public void getRecursive(String StartingSubject,int NumberofRecursions) throws IOException{ - System.out.print("SparqlModul: Depth: "+NumberofRecursions+" @ "+StartingSubject+" "); - if(NumberofRecursions<=0) - return; - else {NumberofRecursions--;} - - String sparql=SparqlQueryMaker.makeQueryFilter(StartingSubject,this.sf); - // checks cache - String FromCache=cache.get(StartingSubject, sparql); - String xml; - // if not in cache get it from dbpedia - if(FromCache==null){ - xml=sendAndReceive(sparql); - cache.put(StartingSubject, xml, sparql); - System.out.print("\n"); - } - else{ - xml=FromCache; - System.out.println("FROM CACHE"); - } - - // get new Subjects - String[] newSubjects=processResult(StartingSubject,xml); - - for (int i = 0; (i < newSubjects.length)&& NumberofRecursions!=0; i++) { - getRecursive(newSubjects[i], NumberofRecursions); - } - } - - /** - * process the sparql result xml in a simple manner - * - * - * @param subject - * @param xml - * @return list of new individuals - */ - public String[] processResult(String subject,String xml){ - //TODO if result is 
empty, catch exceptions - String one="<binding name=\"predicate\"><uri>"; - String two="<binding name=\"object\">"; - String end="</uri></binding>"; - String predtmp=""; - String objtmp=""; - ArrayList<String> al=new ArrayList<String>(); - while(xml.indexOf(one)!=-1){ - //get pred - xml=xml.substring(xml.indexOf(one)+one.length()); - predtmp=xml.substring(0,... [truncated message content] |
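The SparqlCache class deleted above (it reappears as oldSparqlCache in the old package in the next revision) is the project's primitive file cache: one serialized entry per subject, with the file name derived from the URL-encoded subject, and an entry is only handed back if it is still fresh and was produced by the same SPARQL query string. A minimal self-contained sketch of that idea, using hypothetical class and method names rather than the DL-Learner ones, could look like this:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.net.URLEncoder;

// Sketch only: hypothetical names, not the DL-Learner class itself.
// One serialized entry per subject; the file name is the URL-encoded subject.
public class FileCacheSketch implements Serializable {

    static final long serialVersionUID = 104;
    static final long FRESHNESS_MS = 15L * 24 * 60 * 60 * 1000; // 15 days

    long timestamp;
    String sparqlQuery;
    String content;

    FileCacheSketch(String sparqlQuery, String content) {
        this.timestamp = System.currentTimeMillis();
        this.sparqlQuery = sparqlQuery;
        this.content = content;
    }

    // returns the cached result, or null if missing, stale or from another query
    static String get(String basedir, String subject, String sparql) {
        try {
            FileInputStream fis = new FileInputStream(makeFilename(basedir, subject));
            ObjectInputStream in = new ObjectInputStream(fis);
            FileCacheSketch entry = (FileCacheSketch) in.readObject();
            in.close();
            boolean fresh = (System.currentTimeMillis() - entry.timestamp) <= FRESHNESS_MS;
            boolean sameQuery = entry.sparqlQuery.equals(sparql);
            return (fresh && sameQuery) ? entry.content : null;
        } catch (Exception e) {
            return null; // unreadable or missing entries count as cache misses
        }
    }

    // stores one result under the URL-encoded subject
    static void put(String basedir, String subject, String sparql, String content) {
        try {
            new File(basedir).mkdirs();
            FileOutputStream fos = new FileOutputStream(makeFilename(basedir, subject), false);
            ObjectOutputStream out = new ObjectOutputStream(fos);
            out.writeObject(new FileCacheSketch(sparql, content));
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    static String makeFilename(String basedir, String subject) throws Exception {
        return basedir + File.separator + URLEncoder.encode(subject, "UTF-8") + ".cache";
    }
}

The sketch differs from the real class mainly in that SparqlCache doubles as the cache handle itself (basedir and fileending are transient fields on the same class) and additionally offers a checkFile() helper.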
From: <ku...@us...> - 2008-01-18 08:57:54
Revision: 386 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=386&view=rev Author: kurzum Date: 2008-01-18 00:57:51 -0800 (Fri, 18 Jan 2008) Log Message: ----------- package old can be removed after three functions in Sparqlknowledgesource are improved, see todo Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlCache.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlFilter.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 08:43:11 UTC (rev 385) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 08:57:51 UTC (rev 386) @@ -47,7 +47,7 @@ import org.dllearner.kb.sparql.configuration.PredefinedFilter; import org.dllearner.kb.sparql.configuration.SparqlQueryType; import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; -import org.dllearner.kb.sparql.old.SparqlOntologyCollector; +import org.dllearner.kb.sparql.old.*; import org.dllearner.parser.KBParser; import org.dllearner.reasoning.DIGConverter; import org.dllearner.reasoning.JenaOWLDIGConverter; @@ -410,9 +410,15 @@ return ontArray; } + + /** + * TODO SparqlOntologyCollector needs to be removed + * @param label + * @param limit + */ public void calculateSubjects(String label, int limit) { System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); try { subjects = oc.getSubjectsFromLabel(label, limit); } catch (IOException e) { @@ -422,9 +428,13 @@ System.out.println("SparqlModul: ****Finished"); } + /** + * TODO SparqlOntologyCollector needs to be removed + * @param subject + */ public void calculateTriples(String subject) { System.out.println("SparqlModul: Collecting Triples"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); try { triples = oc.collectTriples(subject); } catch (IOException e) { @@ -434,9 +444,13 @@ System.out.println("SparqlModul: ****Finished"); } + /** + * TODO SparqlOntologyCollector needs to be removed + * @param concept + */ public void calculateConceptSubjects(String concept) { System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); try { conceptSubjects = oc.getSubjectsFromConcept(concept); } catch (IOException e) { Added: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-18 08:57:51 UTC (rev 386) @@ -0,0 +1,193 @@ +/** + * Copyright (C) 2007, Sebastian 
Hellmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.kb.sparql; + +import org.dllearner.kb.sparql.configuration.SparqlQueryType; +import org.dllearner.kb.sparql.old.oldSparqlFilter; + + +/** + * Can assemble sparql queries. + * + * @author Sebastian Hellmann + * + */ +public class SparqlQueryMaker { + String lineend="\n"; + boolean print_flag=false; + /* can make queries for subject, predicate, object + * according to the filter settings + * object not yet implemented + * + * */ + + private SparqlQueryType sparqlQueryType; + + public SparqlQueryMaker(SparqlQueryType SparqlQueryType) { + this.sparqlQueryType = SparqlQueryType; + } + + public String makeSubjectQueryUsingFilters(String subject) { + + String Filter = internalFilterAssemblySubject(); + String ret = "SELECT * WHERE { " + lineend + "<" + subject + "> ?predicate ?object. " + + lineend + "FILTER( " + lineend + "(" + Filter + ").}"; + // System.out.println(ret); + //System.out.println(sparqlQueryType.getPredicatefilterlist().length); + return ret; + } + + public String makeRoleQueryUsingFilters(String role) { + + String Filter = internalFilterAssemblyRole(); + String ret = "SELECT * WHERE { " + lineend + " ?subject <" + role + "> ?object. " + lineend + + "FILTER( " + lineend + "(" + Filter + ").}"; + // System.out.println(ret); + + return ret; + } + public String makeRoleQueryUsingFilters(String role,boolean domain) { + + String Filter = internalFilterAssemblyRole(); + String ret=""; + if(domain){ + ret = "SELECT * WHERE { " + lineend + + "?subject <" + role + "> ?object; a []. " + lineend + + "FILTER( " + lineend + "(" + Filter + ").}" ; + //"ORDER BY ?subject"; + // System.out.println(ret); + }else{ + ret = "SELECT * WHERE { " + lineend + + "?object a [] . " + + "?subject <" + role + "> ?object . 
" + lineend + + "FILTER( " + lineend + "(" + Filter + ").}"; + //"ORDER BY ?object"; + + } + //System.out.println(ret); + + return ret; + } + + private String internalFilterAssemblySubject() { + + String Filter = ""; + if (!this.sparqlQueryType.isLiterals()) + Filter += "!isLiteral(?object))"; + for (String p : sparqlQueryType.getPredicatefilterlist()) { + Filter += lineend + filterPredicate(p); + } + for (String o : sparqlQueryType.getObjectfilterlist()) { + Filter += lineend + filterObject(o); + } + return Filter; + } + + private String internalFilterAssemblyRole() { + + String Filter = ""; + if (!this.sparqlQueryType.isLiterals()) + Filter += "!isLiteral(?object))"; + for (String s : sparqlQueryType.getObjectfilterlist()) { + Filter += lineend + filterSubject(s); + } + for (String o : sparqlQueryType.getObjectfilterlist()) { + Filter += lineend + filterObject(o); + } + return Filter; + } + + public String filterSubject(String ns) { + return "&&( !regex(str(?subject), '" + ns + "') )"; + } + + public static String filterPredicate(String ns) { + return "&&( !regex(str(?predicate), '" + ns + "') )"; + } + + public static String filterObject(String ns) { + return "&&( !regex(str(?object), '" + ns + "') )"; + } + + public void p(String str){ + if(print_flag){ + System.out.println(str); + } + } + + /** + * creates a query with the specified filters for alls triples with subject + * @param subject the searched subject + * @param sf special object encapsulating all options + * @return sparql query + */ + public static String makeQueryFilter(String subject, oldSparqlFilter sf){ + + + String Filter=""; + if(!sf.useLiterals)Filter+="!isLiteral(?object)"; + for (String p : sf.getPredFilter()) { + Filter+="\n" + filterPredicate(p); + } + for (String o : sf.getObjFilter()) { + Filter+="\n" + filterObject(o); + } + + String ret= + "SELECT * WHERE { \n" + + "<"+ + subject+ + "> ?predicate ?object.\n"; + if (!(Filter.length()==0)) + ret+="FILTER( \n" + + "(" +Filter+"))."; + ret+="}"; + //System.out.println(ret); + return ret; + } + + /** + * creates a query for subjects with the specified label + * @param label a phrase that is part of the label of a subject + * @param limit this limits the amount of results + * @return + */ + public static String makeLabelQuery(String label,int limit){ + //TODO maybe use http://xmlns:com/foaf/0.1/page + return "SELECT DISTINCT ?subject\n"+ + "WHERE { ?subject <http://www.w3.org/2000/01/rdf-schema#label> ?object.?object bif:contains '\""+label+"\"'@en}\n"+ + "LIMIT "+limit; + } + + /** + * creates a query for all subjects that are of the type concept + * @param concept the type that subjects are searched for + * @return + */ + public static String makeConceptQuery(String concept){ + return "SELECT DISTINCT ?subject\n"+ + "WHERE { ?subject a <"+concept+">}\n"; + } + + public static String makeArticleQuery(String subject){ + return "SELECT ?predicate,?object\n"+ + "WHERE { <"+subject+"> ?predicate ?object}\n"; + } +} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 08:43:11 UTC (rev 385) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 08:57:51 UTC (rev 386) @@ -34,7 +34,6 @@ import org.dllearner.kb.sparql.configuration.Configuration; import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; -import org.dllearner.kb.sparql.old.SparqlQueryMaker; import 
org.dllearner.kb.sparql.query.Cache; import org.dllearner.utilities.StringTuple; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlCache.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlCache.java 2008-01-18 08:43:11 UTC (rev 385) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlCache.java 2008-01-18 08:57:51 UTC (rev 386) @@ -90,7 +90,7 @@ public String get(String key, String sparql){ String ret=null; try{ - SparqlCache c =readFromFile(makeFilename(key)); + oldSparqlCache c =readFromFile(makeFilename(key)); if(c==null)return null; if(!c.checkFreshness())return null; if(!c.validate(sparql))return null; @@ -109,7 +109,7 @@ * @param sparql the sparql query */ public void put(String key, String content, String sparql){ - SparqlCache c=new SparqlCache(content,sparql); + oldSparqlCache c=new oldSparqlCache(content,sparql); putIntoFile(makeFilename(key), c); } @@ -173,7 +173,7 @@ * @param Filename * @param content */ - public void putIntoFile(String Filename,SparqlCache content){ + public void putIntoFile(String Filename,oldSparqlCache content){ try{ FileOutputStream fos = new FileOutputStream( Filename , false ); ObjectOutputStream o = new ObjectOutputStream( fos ); @@ -189,12 +189,12 @@ * @param Filename * @return one entry object */ - public SparqlCache readFromFile(String Filename){ - SparqlCache content=null; + public oldSparqlCache readFromFile(String Filename){ + oldSparqlCache content=null; try{ FileInputStream fos = new FileInputStream( Filename ); ObjectInputStream o = new ObjectInputStream( fos ); - content=(SparqlCache)o.readObject(); + content=(oldSparqlCache)o.readObject(); }catch (Exception e) {} return content; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlFilter.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlFilter.java 2008-01-18 08:43:11 UTC (rev 385) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlFilter.java 2008-01-18 08:57:51 UTC (rev 386) @@ -29,7 +29,7 @@ * @author Sebastian Knappe * */ -public class SparqlFilter { +public class oldSparqlFilter { public int mode=0; // 0 yago, 1 only cat, 2 skos+cat String[] PredFilter=null; @@ -79,7 +79,7 @@ "http://dbpedia.org/resource/Template", "http://upload.wikimedia.org/wikipedia/commons"}; - public SparqlFilter(int mode, String[] pred, String[] obj) { + public oldSparqlFilter(int mode, String[] pred, String[] obj) { if (mode==-1 && (pred==null || obj==null)) {mode=0;} this.mode=mode; @@ -104,7 +104,7 @@ } } - public SparqlFilter(int mode, String[] pred, String[] obj,boolean uselits) throws Exception{ + public oldSparqlFilter(int mode, String[] pred, String[] obj,boolean uselits) throws Exception{ this(mode,pred,obj); this.useLiterals=uselits; } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java 2008-01-18 08:43:11 UTC (rev 385) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java 2008-01-18 08:57:51 UTC (rev 386) @@ -33,7 +33,7 @@ import java.util.Iterator; import java.util.Vector; -import org.dllearner.kb.sparql.old.SparqlQueryMaker; +import org.dllearner.kb.sparql.SparqlQueryMaker; /** @@ -45,13 +45,13 @@ * @author Sebastian Knappe * */ -public class 
SparqlOntologyCollector { +public class oldSparqlOntologyCollector { boolean print_flag=false; SparqlQueryMaker queryMaker; - SparqlCache cache; + oldSparqlCache cache; URL url; - SparqlFilter sf; + oldSparqlFilter sf; String[] subjectList; int numberOfRecursions; HashSet<String> properties; @@ -86,18 +86,18 @@ * @param FilterObjList * @param defClasses */ - public SparqlOntologyCollector(String[] subjectList,int numberOfRecursions, + public oldSparqlOntologyCollector(String[] subjectList,int numberOfRecursions, int filterMode, String[] FilterPredList,String[] FilterObjList,String[] defClasses, String format, URL url, boolean useLits){ this.subjectList=subjectList; this.numberOfRecursions=numberOfRecursions; this.format=format; //this.queryMaker=new SparqlQueryMaker(); - this.cache=new SparqlCache("cache"); + this.cache=new oldSparqlCache("cache"); if(defClasses!=null && defClasses.length>0 ){ this.defaultClasses=defClasses; } try{ - this.sf=new SparqlFilter(filterMode,FilterPredList,FilterObjList,useLits); + this.sf=new oldSparqlFilter(filterMode,FilterPredList,FilterObjList,useLits); this.url=url; this.properties=new HashSet<String>(); this.classes=new HashSet<String>(); @@ -107,10 +107,10 @@ } - public SparqlOntologyCollector(URL url) + public oldSparqlOntologyCollector(URL url) { // this.queryMaker=new SparqlQueryMaker(); - this.cache=new SparqlCache("cache"); + this.cache=new oldSparqlCache("cache"); this.url=url; } Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlQueryMaker.java 2008-01-18 08:43:11 UTC (rev 385) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlQueryMaker.java 2008-01-18 08:57:51 UTC (rev 386) @@ -1,192 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql.old; - -import org.dllearner.kb.sparql.configuration.SparqlQueryType; - - -/** - * Can assemble sparql queries. - * - * @author Sebastian Hellmann - * - */ -public class SparqlQueryMaker { - String lineend="\n"; - boolean print_flag=false; - /* can make queries for subject, predicate, object - * according to the filter settings - * object not yet implemented - * - * */ - - private SparqlQueryType sparqlQueryType; - - public SparqlQueryMaker(SparqlQueryType SparqlQueryType) { - this.sparqlQueryType = SparqlQueryType; - } - - public String makeSubjectQueryUsingFilters(String subject) { - - String Filter = internalFilterAssemblySubject(); - String ret = "SELECT * WHERE { " + lineend + "<" + subject + "> ?predicate ?object. 
" - + lineend + "FILTER( " + lineend + "(" + Filter + ").}"; - // System.out.println(ret); - //System.out.println(sparqlQueryType.getPredicatefilterlist().length); - return ret; - } - - public String makeRoleQueryUsingFilters(String role) { - - String Filter = internalFilterAssemblyRole(); - String ret = "SELECT * WHERE { " + lineend + " ?subject <" + role + "> ?object. " + lineend - + "FILTER( " + lineend + "(" + Filter + ").}"; - // System.out.println(ret); - - return ret; - } - public String makeRoleQueryUsingFilters(String role,boolean domain) { - - String Filter = internalFilterAssemblyRole(); - String ret=""; - if(domain){ - ret = "SELECT * WHERE { " + lineend + - "?subject <" + role + "> ?object; a []. " + lineend - + "FILTER( " + lineend + "(" + Filter + ").}" ; - //"ORDER BY ?subject"; - // System.out.println(ret); - }else{ - ret = "SELECT * WHERE { " + lineend + - "?object a [] . " + - "?subject <" + role + "> ?object . " + lineend - + "FILTER( " + lineend + "(" + Filter + ").}"; - //"ORDER BY ?object"; - - } - //System.out.println(ret); - - return ret; - } - - private String internalFilterAssemblySubject() { - - String Filter = ""; - if (!this.sparqlQueryType.isLiterals()) - Filter += "!isLiteral(?object))"; - for (String p : sparqlQueryType.getPredicatefilterlist()) { - Filter += lineend + filterPredicate(p); - } - for (String o : sparqlQueryType.getObjectfilterlist()) { - Filter += lineend + filterObject(o); - } - return Filter; - } - - private String internalFilterAssemblyRole() { - - String Filter = ""; - if (!this.sparqlQueryType.isLiterals()) - Filter += "!isLiteral(?object))"; - for (String s : sparqlQueryType.getObjectfilterlist()) { - Filter += lineend + filterSubject(s); - } - for (String o : sparqlQueryType.getObjectfilterlist()) { - Filter += lineend + filterObject(o); - } - return Filter; - } - - public String filterSubject(String ns) { - return "&&( !regex(str(?subject), '" + ns + "') )"; - } - - public static String filterPredicate(String ns) { - return "&&( !regex(str(?predicate), '" + ns + "') )"; - } - - public static String filterObject(String ns) { - return "&&( !regex(str(?object), '" + ns + "') )"; - } - - public void p(String str){ - if(print_flag){ - System.out.println(str); - } - } - - /** - * creates a query with the specified filters for alls triples with subject - * @param subject the searched subject - * @param sf special object encapsulating all options - * @return sparql query - */ - public static String makeQueryFilter(String subject, SparqlFilter sf){ - - - String Filter=""; - if(!sf.useLiterals)Filter+="!isLiteral(?object)"; - for (String p : sf.getPredFilter()) { - Filter+="\n" + filterPredicate(p); - } - for (String o : sf.getObjFilter()) { - Filter+="\n" + filterObject(o); - } - - String ret= - "SELECT * WHERE { \n" + - "<"+ - subject+ - "> ?predicate ?object.\n"; - if (!(Filter.length()==0)) - ret+="FILTER( \n" + - "(" +Filter+"))."; - ret+="}"; - //System.out.println(ret); - return ret; - } - - /** - * creates a query for subjects with the specified label - * @param label a phrase that is part of the label of a subject - * @param limit this limits the amount of results - * @return - */ - public static String makeLabelQuery(String label,int limit){ - //TODO maybe use http://xmlns:com/foaf/0.1/page - return "SELECT DISTINCT ?subject\n"+ - "WHERE { ?subject <http://www.w3.org/2000/01/rdf-schema#label> ?object.?object bif:contains '\""+label+"\"'@en}\n"+ - "LIMIT "+limit; - } - - /** - * creates a query for all subjects that are of the type 
concept - * @param concept the type that subjects are searched for - * @return - */ - public static String makeConceptQuery(String concept){ - return "SELECT DISTINCT ?subject\n"+ - "WHERE { ?subject a <"+concept+">}\n"; - } - - public static String makeArticleQuery(String subject){ - return "SELECT ?predicate,?object\n"+ - "WHERE { <"+subject+"> ?predicate ?object}\n"; - } -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java 2008-01-18 08:43:11 UTC (rev 385) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java 2008-01-18 08:57:51 UTC (rev 386) @@ -1,13 +0,0 @@ -package org.dllearner.kb.sparql.query; - -public class TestSparqlQuery { - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - -} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
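To see the query shape that SparqlQueryMaker.makeQueryFilter assembles, here is a small standalone sketch that builds and prints the same kind of filtered SELECT; the subject and the filtered namespaces are example values taken from the filter lists above, not part of the commit:

public class QueryShapeSketch {

    public static void main(String[] args) {
        String subject = "http://dbpedia.org/resource/Angela_Merkel";
        String[] predFilter = { "http://www.w3.org/2004/02/skos/core",
                "http://www.w3.org/2002/07/owl#sameAs" };
        String[] objFilter = { "http://dbpedia.org/resource/Category:",
                "http://xmlns.com/foaf/0.1/" };

        // assemble the FILTER body: forbid literals, then forbid each namespace
        StringBuilder filter = new StringBuilder("!isLiteral(?object)");
        for (String p : predFilter) {
            filter.append("\n&&( !regex(str(?predicate), '").append(p).append("') )");
        }
        for (String o : objFilter) {
            filter.append("\n&&( !regex(str(?object), '").append(o).append("') )");
        }

        // same overall shape as SparqlQueryMaker.makeQueryFilter()
        String query = "SELECT * WHERE { \n"
                + "<" + subject + "> ?predicate ?object.\n"
                + "FILTER( \n(" + filter + ")).}";
        System.out.println(query);
    }
}

The printed query selects every triple of the subject and then discards, via !regex on the stringified predicate and object, everything in the forbidden namespaces, plus literal objects when literals are disabled.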
From: <ku...@us...> - 2008-01-18 10:29:29
Revision: 387 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=387&view=rev Author: kurzum Date: 2008-01-18 02:29:26 -0800 (Fri, 18 Jan 2008) Log Message: ----------- sparql queries are now made by Jena. Jena is 6 times slower, see TestSparqlQuery XML processing is still the old thing Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-18 08:57:51 UTC (rev 386) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-18 10:29:26 UTC (rev 387) @@ -19,6 +19,7 @@ */ package org.dllearner.kb.sparql; +import java.io.IOException; import java.net.URI; import java.util.HashSet; import java.util.Set; @@ -29,6 +30,7 @@ import org.dllearner.kb.sparql.configuration.SparqlQueryType; import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; import org.dllearner.kb.sparql.datastructure.Node; +import org.dllearner.kb.sparql.old.oldSparqlOntologyCollector; import org.dllearner.utilities.StringTuple; /** @@ -145,5 +147,53 @@ public Configuration getConfiguration(){ return configuration; } + + /*public void calculateSubjects(String label, int limit) { + System.out.println("SparqlModul: Collecting Subjects"); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + try { + subjects = oc.getSubjectsFromLabel(label, limit); + } catch (IOException e) { + subjects = new String[1]; + subjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + /** + * TODO SparqlOntologyCollector needs to be removed + * @param subject + */ + /* + public void calculateTriples(String subject) { + System.out.println("SparqlModul: Collecting Triples"); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + try { + triples = oc.collectTriples(subject); + } catch (IOException e) { + triples = new String[1]; + triples[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } +*/ + /** + * TODO SparqlOntologyCollector needs to be removed + * @param concept + */ + + /*public void calculateConceptSubjects(String concept) { + System.out.println("SparqlModul: Collecting Subjects"); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + try { + conceptSubjects = oc.getSubjectsFromConcept(concept); + } catch (IOException e) { + conceptSubjects = new String[1]; + conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + */ + + } \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 08:57:51 UTC (rev 386) +++ 
trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 10:29:26 UTC (rev 387) @@ -35,6 +35,8 @@ import org.dllearner.kb.sparql.configuration.Configuration; import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; import org.dllearner.kb.sparql.query.Cache; +import org.dllearner.kb.sparql.query.CachedSparqlQuery; +import org.dllearner.kb.sparql.query.SparqlQuery; import org.dllearner.utilities.StringTuple; /** @@ -49,6 +51,8 @@ private Configuration configuration; // private SparqlHTTPRequest SparqlHTTPRequest; private SparqlQueryMaker sparqlQueryMaker; + private SparqlQuery sparqlQuery; + private CachedSparqlQuery cachedSparqlQuery; Cache cache; public TypedSparqlQuery(Configuration Configuration) { @@ -56,7 +60,9 @@ // this.SparqlHTTPRequest = new // SparqlHTTPRequest(Configuration.getSparqlEndpoint()); this.sparqlQueryMaker = new SparqlQueryMaker(Configuration.getSparqlQueryType()); + this.sparqlQuery=new SparqlQuery(configuration.getSparqlEndpoint()); this.cache = new Cache("cache"); + this.cachedSparqlQuery=new CachedSparqlQuery(this.sparqlQuery,this.cache); } // standard query get a tupels (p,o) for subject s public Set<StringTuple> query(URI u) { @@ -93,31 +99,7 @@ // uses a cache private Set<StringTuple> cachedSparql(URI u, String sparql, String a, String b) { // check cache - String FromCache = cache.get(u.toString(), sparql); - if(debug_no_cache) { - FromCache=null; - } - String xml = null; - // if not in cache get it from EndPoint - if (FromCache == null) { - configuration.increaseNumberOfuncachedSparqlQueries(); - try { - xml = sendAndReceiveSPARQL(sparql); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - p(sparql); - // System.out.println(xml); - if(!debug_no_cache) { - cache.put(u.toString(), sparql, xml); - } - //System.out.print("\n"); - } else { - configuration.increaseNumberOfCachedSparqlQueries(); - xml = FromCache; - //System.out.println("FROM CACHE"); - } + String xml=this.cachedSparqlQuery.getAsXMLString(u, sparql); // System.out.println(sparql); // System.out.println(xml); @@ -202,66 +184,11 @@ return xml; } - private String sendAndReceiveSPARQL(String sparql) throws IOException { - p("sendAndReceiveSPARQL"); - StringBuilder answer = new StringBuilder(); - //sparql="SELECT * WHERE {?a ?b ?c}LIMIT 10"; - - // String an Sparql-Endpoint schicken - HttpURLConnection connection; - SpecificSparqlEndpoint se = configuration.getSparqlEndpoint(); - p("URL: "+se.getURL()); - p("Host: "+se.getHost()); - - connection = (HttpURLConnection) se.getURL().openConnection(); - connection.setDoOutput(true); - - //connection.addRequestProperty("Host", se.getHost()); - connection.addRequestProperty("Connection", "close"); - connection - .addRequestProperty( - "Accept", - "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); - connection.addRequestProperty("Accept-Language", "de-de,de;q=0.8,en-us;q=0.5,en;q=0.3"); - connection.addRequestProperty("Accept-Charset", "utf-8;q=1.0"); - connection - .addRequestProperty( - "User-Agent", - "Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8.1.4) Gecko/20070515 Firefox/2.0.0.4 Web-Sniffer/1.0.24"); - - OutputStream os = connection.getOutputStream(); - OutputStreamWriter osw = new OutputStreamWriter(os); - - Set<String> s = se.getParameters().keySet(); - Iterator<String> it = s.iterator(); - String FullURI = ""; - while (it.hasNext()) { - String element = it.next(); - FullURI += "" + URLEncoder.encode(element, "UTF-8") + 
"=" - + URLEncoder.encode(se.getParameters().get(element), "UTF-8") + "&"; - } - - FullURI += "" + se.getHasQueryParameter() + "=" + URLEncoder.encode(sparql, "UTF-8"); - p(FullURI); - osw.write(FullURI); - osw.close(); - - // receive answer - InputStream is = connection.getInputStream(); - InputStreamReader isr = new InputStreamReader(is, "UTF-8"); - BufferedReader br = new BufferedReader(isr); - - String line; - do { - line = br.readLine(); - if (line != null) - answer.append(line); - } while (line != null); - - br.close(); - p(answer.toString()); - return answer.toString(); + public String sendAndReceiveSPARQL(String queryString){ + //SparqlQuery sq=new SparqlQuery(configuration.getSparqlEndpoint()); + return sparqlQuery.getAsXMLString(queryString); } + public void p(String str){ if(print_flag){ System.out.println(str); Added: trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java 2008-01-18 10:29:26 UTC (rev 387) @@ -0,0 +1,57 @@ +package org.dllearner.kb.sparql.query; + +import java.net.URI; + +import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; + +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; + +public class CachedSparqlQuery { + +//SpecificSparqlEndpoint specificSparqlEndpoint; +Cache cache; +SparqlQuery sparqlQuery; +boolean debug_no_cache=false; + + public CachedSparqlQuery(SpecificSparqlEndpoint endpoint,Cache c) { + //this.specificSparqlEndpoint=endpoint; + this.sparqlQuery=new SparqlQuery(endpoint); + this.cache=c; + + } + public CachedSparqlQuery(SparqlQuery sparqlQuery,Cache c) { + + this.sparqlQuery=sparqlQuery; + this.cache=c; + + } + + public String getAsXMLString(URI u, String sparql){ + String FromCache = cache.get(u.toString(), sparql); + if(debug_no_cache) { + FromCache=null; + } + String xml = null; + // if not in cache get it from EndPoint + if (FromCache == null) { + //configuration.increaseNumberOfuncachedSparqlQueries(); + + xml = this.sparqlQuery.getAsXMLString(sparql); + //sendAndReceiveSPARQL(sparql); + + //p(sparql); + // System.out.println(xml); + if(!debug_no_cache) { + cache.put(u.toString(), sparql, xml); + } + //System.out.print("\n"); + } else { + //configuration.increaseNumberOfCachedSparqlQueries(); + xml = FromCache; + //System.out.println("FROM CACHE"); + } + + return xml; + } +} Added: trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java 2008-01-18 10:29:26 UTC (rev 387) @@ -0,0 +1,70 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.kb.sparql.query; + +import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; +import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; + +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecution; +import com.hp.hpl.jena.query.QueryExecutionFactory; +import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; + +/** + * Represents a SPARQL query. It includes support for stopping the SPARQL + * query (which may be necessary if a timeout is reached). + * + * TODO: It is probably good to change all SPARQL query calls to use only + * this class. + * + * TODO: Could we use Jena as a solid foundation here? (com.hp.jena.query) + * + * @author Jens Lehmann + * + */ +public class JenaTestScript { + // this is a working Jena script + // TODO: query runtime seems to be much too high (compared to running it in http://dbpedia.org/sparql) + // verify whether our SPARQL query implementation is faster and why; + // TODO: check whether Jena works with the other endpoints in PredefinedEndpoint; if not + // check whether it can be configured to run with these + public static void main(String[] args) { + + + String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " + + "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>" + + "SELECT ?episode ?chalkboard_gag WHERE { ?episode skos:subject" + + " <http://dbpedia.org/resource/Category:The_Simpsons_episodes%2C_season_12>." + + " ?episode dbpedia2:blackboard ?chalkboard_gag }"; + + //System.out.println(queryString); + // create a query and parse it into Jena + Query query = QueryFactory.create(queryString); + query.validate(); + // Jena access to DBpedia SPARQL endpoint + QueryExecution queryExecution = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query); + System.out.println("query SPARQL server"); + ResultSet rs = queryExecution.execSelect(); + ResultSetFormatter.out(System.out, rs, query) ; + } + +} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-18 08:57:51 UTC (rev 386) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-18 10:29:26 UTC (rev 387) @@ -40,52 +40,34 @@ * @author Jens Lehmann * */ -public class SparqlQuery { - - private boolean isRunning = false; - - public SparqlQuery(SpecificSparqlEndpoint endpoint, String query) { +public class SparqlQuery extends SparqlQueryAbstract{ + public SparqlQuery(SpecificSparqlEndpoint endpoint) { + super(endpoint); + // TODO Auto-generated constructor stub } - - public void send() { - isRunning = true; - - // ... send query - // ... 
check periodically whether isRunning is still true, if not - // abort the query - } - - public void stop() { - isRunning = false; - } - public boolean isRunning() { - return isRunning; - } - - // this is a working Jena script - // TODO: query runtime seems to be much too high (compared to running it in http://dbpedia.org/sparql) - // verify whether our SPARQL query implementation is faster and why; - // TODO: check whether Jena works with the other endpoints in PredefinedEndpoint; if not - // check whether it can be configured to run with these - public static void main(String[] args) { + private ResultSet sendAndReceive(String queryString){ - String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " + - "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>" + - "SELECT ?episode ?chalkboard_gag WHERE { ?episode skos:subject" + - " <http://dbpedia.org/resource/Category:The_Simpsons_episodes%2C_season_12>." + - " ?episode dbpedia2:blackboard ?chalkboard_gag }"; - - System.out.println(queryString); + p(queryString); // create a query and parse it into Jena Query query = QueryFactory.create(queryString); query.validate(); // Jena access to DBpedia SPARQL endpoint - QueryExecution queryExecution = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query); - System.out.println("query SPARQL server"); + QueryExecution queryExecution = + QueryExecutionFactory.sparqlService(specificSparqlEndpoint.getURL().toString(), query); + + p("query SPARQL server"); ResultSet rs = queryExecution.execSelect(); - ResultSetFormatter.out(System.out, rs, query) ; + + //ResultSetFormatter.out(System.out, rs, query) ; + + return rs; } + public String getAsXMLString(String queryString){ + ResultSet rs=sendAndReceive(queryString); + return ResultSetFormatter.asXMLString(rs); + } + } Added: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java 2008-01-18 10:29:26 UTC (rev 387) @@ -0,0 +1,39 @@ +package org.dllearner.kb.sparql.query; + +import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; + + public abstract class SparqlQueryAbstract { + private boolean isRunning = false; + boolean print_flag=false; + SpecificSparqlEndpoint specificSparqlEndpoint; + + public SparqlQueryAbstract(SpecificSparqlEndpoint endpoint) { + this.specificSparqlEndpoint=endpoint; + } + + public void send() { + isRunning = true; + + // ... send query + // ... 
check periodically whether isRunning is still true, if not + // abort the query + } + + public void stop() { + isRunning = false; + } + + public boolean isRunning() { + return isRunning; + } + + + public abstract String getAsXMLString(String queryString); + + public void p(String str){ + if(print_flag){ + System.out.println(str); + } + } + +} Added: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java 2008-01-18 10:29:26 UTC (rev 387) @@ -0,0 +1,100 @@ +package org.dllearner.kb.sparql.query; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.net.HttpURLConnection; +import java.net.URLEncoder; +import java.util.Iterator; +import java.util.Set; + +import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; + +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; + +public class SparqlQueryConventional extends SparqlQueryAbstract{ + + + + + + public SparqlQueryConventional(SpecificSparqlEndpoint specificSparqlEndpoint) { + super(specificSparqlEndpoint); + } + + private String sendAndReceiveSPARQL(String sparql) throws IOException { + p("sendAndReceiveSPARQL"); + StringBuilder answer = new StringBuilder(); + //sparql="SELECT * WHERE {?a ?b ?c}LIMIT 10"; + + // String an Sparql-Endpoint schicken + HttpURLConnection connection; + + //SpecificSparqlEndpoint specificSparqlEndpoint = configuration.getSparqlEndpoint(); + p("URL: "+specificSparqlEndpoint.getURL()); + p("Host: "+specificSparqlEndpoint.getHost()); + + connection = (HttpURLConnection) specificSparqlEndpoint.getURL().openConnection(); + connection.setDoOutput(true); + + //connection.addRequestProperty("Host", specificSparqlEndpoint.getHost()); + connection.addRequestProperty("Connection", "close"); + connection + .addRequestProperty( + "Accept", + "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); + connection.addRequestProperty("Accept-Language", "de-de,de;q=0.8,en-us;q=0.5,en;q=0.3"); + connection.addRequestProperty("Accept-Charset", "utf-8;q=1.0"); + connection + .addRequestProperty( + "User-Agent", + "Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8.1.4) Gecko/20070515 Firefox/2.0.0.4 Web-Sniffer/1.0.24"); + + OutputStream os = connection.getOutputStream(); + OutputStreamWriter osw = new OutputStreamWriter(os); + + Set<String> s = specificSparqlEndpoint.getParameters().keySet(); + Iterator<String> it = s.iterator(); + String FullURI = ""; + while (it.hasNext()) { + String element = it.next(); + FullURI += "" + URLEncoder.encode(element, "UTF-8") + "=" + + URLEncoder.encode(specificSparqlEndpoint.getParameters().get(element), "UTF-8") + "&"; + } + + FullURI += "" + specificSparqlEndpoint.getHasQueryParameter() + "=" + URLEncoder.encode(sparql, "UTF-8"); + p(FullURI); + osw.write(FullURI); + osw.close(); + + // receive answer + InputStream is = connection.getInputStream(); + InputStreamReader isr = new InputStreamReader(is, "UTF-8"); + BufferedReader br = new BufferedReader(isr); + + String line; + do { + line = br.readLine(); + if (line != null) + answer.append(line); + } while (line != null); + + br.close(); + 
p(answer.toString()); + return answer.toString(); + } + + public String getAsXMLString(String queryString){ + try{ + return sendAndReceiveSPARQL(queryString); + }catch (IOException e) {e.printStackTrace();} + return null; + } + + + +} Added: trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java 2008-01-18 10:29:26 UTC (rev 387) @@ -0,0 +1,96 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.kb.sparql.query; + +import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; +import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; + +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecution; +import com.hp.hpl.jena.query.QueryExecutionFactory; +import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; +import com.hp.hpl.jena.reasoner.rulesys.impl.oldCode.TestTrail; + +/** + * Represents a SPARQL query. It includes support for stopping the SPARQL + * query (which may be necessary if a timeout is reached). + * + * TODO: It is probably good to change all SPARQL query calls to use only + * this class. + * + * TODO: Could we use Jena as a solid foundation here? (com.hp.jena.query) + * + * @author Jens Lehmann + * + */ +public class TestSparqlQuery { + + // this is a working Jena script + // TODO: query runtime seems to be much too high (compared to running it in http://dbpedia.org/sparql) + // verify whether our SPARQL query implementation is faster and why; + // TODO: check whether Jena works with the other endpoints in PredefinedEndpoint; if not + // check whether it can be configured to run with these + public static void main(String[] args) { + + String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " + + "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>" + + "SELECT ?episode ?chalkboard_gag WHERE { ?episode skos:subject" + + " <http://dbpedia.org/resource/Category:The_Simpsons_episodes%2C_season_12>." 
+ + " ?episode dbpedia2:blackboard ?chalkboard_gag }"; + + testTime(5,queryString); + + //compareResults( queryString); + + + } + + public static void testTime(int howOften, String queryString){ + SpecificSparqlEndpoint sse= PredefinedEndpoint.dbpediaEndpoint(); + SparqlQuery sqJena=new SparqlQuery(sse); + SparqlQueryConventional sqConv=new SparqlQueryConventional(sse); + + + long now=System.currentTimeMillis(); + for (int i = 0; i < howOften; i++) { + sqJena.getAsXMLString(queryString); + + + } + System.out.println("Jena needed: "+(System.currentTimeMillis()-now)); + now=System.currentTimeMillis(); + for (int i = 0; i < howOften; i++) { + sqConv.getAsXMLString(queryString); + } + System.out.println("Conv needed: "+(System.currentTimeMillis()-now)); + } + + public static void compareResults( String queryString){ + SpecificSparqlEndpoint sse= PredefinedEndpoint.dbpediaEndpoint(); + SparqlQuery sqJena=new SparqlQuery(sse); + SparqlQueryConventional sqConv=new SparqlQueryConventional(sse); + + System.out.println(sqJena.getAsXMLString(queryString)); + System.out.println(sqConv.getAsXMLString(queryString)); + + } +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2008-01-18 23:22:04
|
Revision: 395 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=395&view=rev Author: kurzum Date: 2008-01-18 15:21:56 -0800 (Fri, 18 Jan 2008) Log Message: ----------- a lot of changes, see email, will remove warnings soon Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/old/SparqlQueryConventional.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/ trunk/src/dl-learner/org/dllearner/kb/sparql/test/JenaTestScript.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/Test.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedFilter.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 23:21:56 UTC (rev 395) @@ -43,11 +43,9 @@ import org.dllearner.core.config.StringSetConfigOption; import org.dllearner.core.config.StringTupleListConfigOption; import org.dllearner.core.dl.KB; -import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; -import org.dllearner.kb.sparql.configuration.PredefinedFilter; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.configuration.SparqlQueryType; -import org.dllearner.kb.sparql.configuration.SparqlEndpoint; -import org.dllearner.kb.sparql.old.*; +import org.dllearner.kb.sparql.old.oldSparqlOntologyCollector; import org.dllearner.kb.sparql.query.SparqlQuery; import org.dllearner.parser.KBParser; import org.dllearner.reasoning.DIGConverter; @@ -66,13 +64,13 @@ // ConfigOptions private URL url; String host; - private Set<String> instances=new HashSet<String>();; + private Set<String> instances = new HashSet<String>();; private URL dumpFile; private int recursionDepth = 1; private int predefinedFilter = 0; private int predefinedEndpoint = 0; - private Set<String> predList=new HashSet<String>(); - private Set<String> objList=new HashSet<String>(); + private Set<String> predList = new HashSet<String>(); + private Set<String> objList = new HashSet<String>(); // private Set<String> classList; private String format = "N-TRIPLES"; private boolean dumpToFile = true; @@ -83,16 +81,16 @@ private boolean learnDomain = false; private boolean learnRange = 
false; - private int numberOfInstancesUsedForRoleLearning=40; - private String role=""; + private int numberOfInstancesUsedForRoleLearning = 40; + private String role = ""; private String blankNodeIdentifier = "bnode"; - + LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); SparqlEndpoint sse = null; - + /** * Holds the results of the calculateSubjects method */ @@ -146,37 +144,58 @@ Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); - options.add(new StringSetConfigOption("instances", - "relevant instances e.g. positive and negative examples in a learning problem")); + options + .add(new StringSetConfigOption("instances", + "relevant instances e.g. positive and negative examples in a learning problem")); options.add(new IntegerConfigOption("recursionDepth", "recursion depth of KB fragment selection", 2)); - options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); - options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); + options.add(new IntegerConfigOption("predefinedFilter", + "the mode of the SPARQL Filter")); + options.add(new IntegerConfigOption("predefinedEndpoint", + "the mode of the SPARQL Filter")); - options.add(new StringSetConfigOption("predList", "list of all ignored roles")); - options.add(new StringSetConfigOption("objList", "list of all ignored objects")); - options.add(new StringSetConfigOption("classList", "list of all ignored classes")); - options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); - options.add(new BooleanConfigOption("dumpToFile", - "Specifies whether the extracted ontology is written to a file or not.", true)); - options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); - options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); + options.add(new StringSetConfigOption("predList", + "list of all ignored roles")); + options.add(new StringSetConfigOption("objList", + "list of all ignored objects")); + options.add(new StringSetConfigOption("classList", + "list of all ignored classes")); + options.add(new StringConfigOption("format", "N-TRIPLES or KB format", + "N-TRIPLES")); + options + .add(new BooleanConfigOption( + "dumpToFile", + "Specifies whether the extracted ontology is written to a file or not.", + true)); + options.add(new BooleanConfigOption("useLits", + "use Literals in SPARQL query")); + options + .add(new BooleanConfigOption( + "getAllSuperClasses", + "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", + true)); - options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); - options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); - options.add(new StringConfigOption("role", "role to learn Domain/Range from")); + options.add(new BooleanConfigOption("learnDomain", + "learns the Domain for a Role")); + options.add(new BooleanConfigOption("learnRange", + "learns the Range for a Role")); + options.add(new StringConfigOption("role", + "role to learn Domain/Range from")); options.add(new 
StringConfigOption("blankNodeIdentifier", "used to identify blanknodes in Tripels")); options.add(new StringTupleListConfigOption("example", "example")); - options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); - options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); - options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); - options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); - options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); - - - + options.add(new StringTupleListConfigOption("replacePredicate", + "rule for replacing predicates")); + options.add(new StringTupleListConfigOption("replaceObject", + "rule for replacing predicates")); + options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", + "stops a cyclic hierarchy after specified number of classes")); + options.add(new IntegerConfigOption( + "numberOfInstancesUsedForRoleLearning", "")); + options.add(new BooleanConfigOption("closeAfterRecursion", + "gets all classes for all instances")); + return options; } @@ -185,15 +204,16 @@ */ @Override @SuppressWarnings( { "unchecked" }) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + public <T> void applyConfigEntry(ConfigEntry<T> entry) + throws InvalidConfigOptionValueException { String option = entry.getOptionName(); if (option.equals("url")) { String s = (String) entry.getValue(); try { url = new URL(s); } catch (MalformedURLException e) { - throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), - "malformed URL " + s); + throw new InvalidConfigOptionValueException(entry.getOption(), + entry.getValue(), "malformed URL " + s); } } else if (option.equals("host")) { host = (String) entry.getValue(); @@ -205,8 +225,8 @@ predList = (Set<String>) entry.getValue(); } else if (option.equals("objList")) { objList = (Set<String>) entry.getValue(); - //} else if (option.equals("classList")) { - // classList = (Set<String>) entry.getValue(); + // } else if (option.equals("classList")) { + // classList = (Set<String>) entry.getValue(); } else if (option.equals("predefinedEndpoint")) { predefinedEndpoint = (Integer) entry.getValue(); } else if (option.equals("predefinedFilter")) { @@ -221,26 +241,26 @@ getAllSuperClasses = (Boolean) entry.getValue(); } else if (option.equals("learnDomain")) { learnDomain = (Boolean) entry.getValue(); - }else if (option.equals("learnRange")) { + } else if (option.equals("learnRange")) { learnRange = (Boolean) entry.getValue(); } else if (option.equals("role")) { role = (String) entry.getValue(); } else if (option.equals("blankNodeIdentifier")) { blankNodeIdentifier = (String) entry.getValue(); } else if (option.equals("example")) { - //System.out.println(entry.getValue()); - }else if (option.equals("replacePredicate")) { - replacePredicate = (LinkedList)entry.getValue(); - }else if (option.equals("replaceObject")) { - replaceObject = (LinkedList)entry.getValue(); - }else if (option.equals("breakSuperClassRetrievalAfter")) { + // System.out.println(entry.getValue()); + } else if (option.equals("replacePredicate")) { + replacePredicate = (LinkedList) entry.getValue(); + } else if (option.equals("replaceObject")) { + replaceObject = (LinkedList) entry.getValue(); + } else if (option.equals("breakSuperClassRetrievalAfter")) { 
breakSuperClassRetrievalAfter = (Integer) entry.getValue(); - }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { + } else if (option.equals("numberOfInstancesUsedForRoleLearning")) { numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); - }else if (option.equals("closeAfterRecursion")) { + } else if (option.equals("closeAfterRecursion")) { closeAfterRecursion = (Boolean) entry.getValue(); - } - + } + } /* @@ -259,92 +279,99 @@ Manager m = new Manager(); SparqlQueryType sqt = null; // get Options for Manipulator - Manipulator man = new Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); + Manipulator man = new Manipulator(blankNodeIdentifier, + breakSuperClassRetrievalAfter, replacePredicate, replaceObject); HashMap<String, String> parameters = new HashMap<String, String>(); parameters.put("default-graph-uri", "http://dbpedia.org"); parameters.put("format", "application/sparql-results.xml"); // get Options for endpoints if (predefinedEndpoint >= 1) { - sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); + sse = SparqlEndpoint.getEndpointByNumber(predefinedEndpoint); } else { - sse = new SparqlEndpoint(url, host, parameters); + // TODO this is not optimal, because not all options are used + sse = new SparqlEndpoint(url); } // get Options for Filters - + if (predefinedFilter >= 1) { - sqt = PredefinedFilter.getFilter(predefinedFilter); + sqt = SparqlQueryType.getFilter(predefinedFilter); } else { sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); - + } // give everything to the manager - m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); + m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses, + closeAfterRecursion); try { String ont = ""; - //System.out.println(learnDomain); + // System.out.println(learnDomain); // used to learn a domain of a role if (learnDomain || learnRange) { - Set<String> pos=new HashSet<String>(); - Set<String> neg=new HashSet<String>(); - if(learnDomain){ + Set<String> pos = new HashSet<String>(); + Set<String> neg = new HashSet<String>(); + if (learnDomain) { pos = m.getDomainInstancesForRole(role); neg = m.getRangeInstancesForRole(role); - }else if(learnRange){ + } else if (learnRange) { neg = m.getDomainInstancesForRole(role); pos = m.getRangeInstancesForRole(role); } - //choose 30 - - - Set<String> tmp=new HashSet<String>(); - for(String one:pos){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - pos=tmp; - System.out.println("Instances used: "+pos.size()); - - tmp=new HashSet<String>(); - for(String one:neg){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - neg=tmp; - - instances=new HashSet<String>(); - instances.addAll(pos); - - instances.addAll(neg); - - for(String one:pos){ - System.out.println("+\""+one+"\""); - } - for(String one:neg){ - System.out.println("-\""+one+"\""); - } - - /*Random r= new Random(); - - - Object[] arr=instances.toArray(); - while(instances.size()>=30){ - - }*/ + // choose 30 + + Set<String> tmp = new HashSet<String>(); + for (String one : pos) { + tmp.add(one); + if (tmp.size() >= numberOfInstancesUsedForRoleLearning) + break; + } + pos = tmp; + System.out.println("Instances used: " + pos.size()); + + tmp = new HashSet<String>(); + for (String one : neg) { + tmp.add(one); + if (tmp.size() >= numberOfInstancesUsedForRoleLearning) + break; + } + neg = tmp; + + instances = new HashSet<String>(); + instances.addAll(pos); + 
+ instances.addAll(neg); + + for (String one : pos) { + System.out.println("+\"" + one + "\""); + } + for (String one : neg) { + System.out.println("-\"" + one + "\""); + } + + /* + * Random r= new Random(); + * + * + * Object[] arr=instances.toArray(); + * while(instances.size()>=30){ + * } + */ // add the role to the filter(a solution is always EXISTS // role.TOP) m.addPredicateFilter(role); - //System.out.println(instances); - // THIS is a workaround - + // System.out.println(instances); + // THIS is a workaround + } // the actual extraction is started here ont = m.extract(instances); - System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); - System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); - + System.out.println("Number of cached SPARQL queries: " + + m.getConfiguration().numberOfCachedSparqlQueries); + System.out.println("Number of uncached SPARQL queries: " + + m.getConfiguration().numberOfUncachedSparqlQueries); + System.out.println("Finished collecting Fragment"); if (dumpToFile) { @@ -354,7 +381,8 @@ if (!new File(basedir).exists()) new File(basedir).mkdir(); - FileWriter fw = new FileWriter(new File(basedir + filename), true); + FileWriter fw = new FileWriter( + new File(basedir + filename), true); fw.write(ont); fw.flush(); fw.close(); @@ -366,8 +394,8 @@ } if (format.equals("KB")) { try { - //kb = KBParser.parseKBFile(new StringReader(ont)); - kb=KBParser.parseKBFile(dumpFile); + // kb = KBParser.parseKBFile(new StringReader(ont)); + kb = KBParser.parseKBFile(dumpFile); } catch (Exception e) { e.printStackTrace(); } @@ -386,7 +414,8 @@ @Override public String toDIG(URI kbURI) { if (format.equals("N-TRIPLES")) - return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); + return JenaOWLDIGConverter.getTellsString(dumpFile, + OntologyFormat.N_TRIPLES, kbURI); else return DIGConverter.getDIGString(kb, kbURI).toString(); } @@ -398,7 +427,8 @@ * org.dllearner.core.OntologyFormat) */ @Override - public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { + public void export(File file, OntologyFormat format) + throws OntologyFormatUnsupportedException { // currently no export functions implemented, so we just throw an // exception throw new OntologyFormatUnsupportedException("export", format); @@ -412,9 +442,9 @@ return ontArray; } - /** * TODO SparqlOntologyCollector needs to be removed + * * @param label * @param limit */ @@ -432,6 +462,7 @@ /** * TODO SparqlOntologyCollector needs to be removed + * * @param subject */ public void calculateTriples(String subject) { @@ -448,6 +479,7 @@ /** * TODO SparqlOntologyCollector needs to be removed + * * @param concept */ public void calculateConceptSubjects(String concept) { @@ -521,8 +553,8 @@ public String[] getConceptSubjects() { return conceptSubjects; } - + public SparqlQuery sparqlQuery(String query) { - return new SparqlQuery(sse, query); + return new SparqlQuery(query, sse); } } Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/Test.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Test.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Test.java 2008-01-18 23:21:56 UTC (rev 395) @@ -1,56 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. 
- * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql; - -import java.io.File; -import java.io.FileWriter; -import java.net.URI; - -/** - * Test class. - * - * @author Sebastian Hellmann - * - */ -public class Test { - - public static void main(String[] args) { - System.out.println("Start"); - // String test2 = "http://www.extraction.org/config#dbpediatest"; - // String test = "http://www.extraction.org/config#localjoseki"; - try { - // URI u = new URI(test); - Manager m = new Manager(); - // m.usePredefinedConfiguration(u); - - URI u2 = new URI("http://dbpedia.org/resource/Angela_Merkel"); - - String filename = System.currentTimeMillis() + ".nt"; - FileWriter fw = new FileWriter(new File(filename), true); - fw.write(m.extract(u2)); - fw.flush(); - fw.close(); - - } catch (Exception e) { - e.printStackTrace(); - } - } - -} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 23:21:56 UTC (rev 395) @@ -43,15 +43,15 @@ * Can execute different queries. 
* * @author Sebastian Hellmann - * + * */ -public class TypedSparqlQuery implements TypedSparqlQueryInterface{ - boolean print_flag=false; - boolean debug_no_cache=false;// true means no cahce is used +public class TypedSparqlQuery implements TypedSparqlQueryInterface { + boolean print_flag = false; + boolean debug_no_cache = false;// true means no cahce is used private Configuration configuration; // private SparqlHTTPRequest SparqlHTTPRequest; private SparqlQueryMaker sparqlQueryMaker; - private SparqlQuery sparqlQuery; + // private SparqlQuery sparqlQuery; private CachedSparqlQuery cachedSparqlQuery; Cache cache; @@ -59,16 +59,20 @@ this.configuration = Configuration; // this.SparqlHTTPRequest = new // SparqlHTTPRequest(Configuration.getSparqlEndpoint()); - this.sparqlQueryMaker = new SparqlQueryMaker(Configuration.getSparqlQueryType()); - this.sparqlQuery=new SparqlQuery(configuration.getSparqlEndpoint()); + this.sparqlQueryMaker = new SparqlQueryMaker(Configuration + .getSparqlQueryType()); + // this.sparqlQuery=new SparqlQuery(configuration.getSparqlEndpoint()); this.cache = new Cache("cache"); - this.cachedSparqlQuery=new CachedSparqlQuery(this.sparqlQuery,this.cache); + // this.cachedSparqlQuery=new + // CachedSparqlQuery(this.sparqlQuery,this.cache); } + // standard query get a tupels (p,o) for subject s public Set<StringTuple> query(URI u) { // getQuery - String sparql = sparqlQueryMaker.makeSubjectQueryUsingFilters(u.toString()); + String sparql = sparqlQueryMaker.makeSubjectQueryUsingFilters(u + .toString()); return cachedSparql(u, sparql, "predicate", "object"); } @@ -77,17 +81,20 @@ public Set<StringTuple> getTupelsForRole(URI u) { // getQuery - String sparql = sparqlQueryMaker.makeRoleQueryUsingFilters(u.toString()); + String sparql = sparqlQueryMaker + .makeRoleQueryUsingFilters(u.toString()); Set<StringTuple> s = cachedSparql(u, sparql, "subject", "object"); // System.out.println(s); return s; } - public Set<StringTuple> getTupelsForRole(URI u,boolean domain) { + public Set<StringTuple> getTupelsForRole(URI u, boolean domain) { + // getQuery - String sparql = sparqlQueryMaker.makeRoleQueryUsingFilters(u.toString(),domain); + String sparql = sparqlQueryMaker.makeRoleQueryUsingFilters( + u.toString(), domain); Set<StringTuple> s = cachedSparql(u, sparql, "subject", "object"); // System.out.println(s); @@ -95,106 +102,135 @@ } - - // uses a cache - private Set<StringTuple> cachedSparql(URI u, String sparql, String a, String b) { + // uses a cache + private Set<StringTuple> cachedSparql(URI u, String sparql, String a, + String b) { // check cache - String xml=this.cachedSparqlQuery.getAsXMLString(u, sparql); + String FromCache = cache.get(u.toString(), sparql); + if (debug_no_cache) { + FromCache = null; + } + String xml = null; + // if not in cache get it from EndPoint + if (FromCache == null) { + configuration.increaseNumberOfuncachedSparqlQueries(); + // try { + xml = sendAndReceiveSPARQL(sparql); + /* + * } catch (IOException e) {e.printStackTrace();} + */ + p(sparql); + // System.out.println(xml); + if (!debug_no_cache) { + cache.put(u.toString(), sparql, xml); + } + // System.out.print("\n"); + } else { + configuration.increaseNumberOfCachedSparqlQueries(); + xml = FromCache; + // System.out.println("FROM CACHE"); + } // System.out.println(sparql); // System.out.println(xml); // process XML Set<StringTuple> s = processResult(xml, a, b); try { - //System.out.println("retrieved " + s.size() + " tupels\n"); + // System.out.println("retrieved " + s.size() + " tupels\n"); } 
catch (Exception e) { } return s; + } public Set<StringTuple> processResult(String xml, String a, String b) { Set<StringTuple> ret = new HashSet<StringTuple>(); // TODO if result is empty, catch exceptions - String resEnd="</result>"; + String resEnd = "</result>"; String one = "binding name=\"" + a + "\""; String two = "binding name=\"" + b + "\""; - String endbinding= "binding"; - String uri="uri"; - //String uridel = "<uri>"; + String endbinding = "binding"; + String uri = "uri"; + // String uridel = "<uri>"; String bnode = "<bnode>"; - //String uriend = "</uri>"; + // String uriend = "</uri>"; String predtmp = ""; String objtmp = ""; - //System.out.println(getNextResult(xml)); - String nextResult=""; - while ((nextResult=getNextResult( xml))!=null){ - //System.out.println(xml.indexOf(resEnd)); - //System.out.println(xml); - if(nextResult.indexOf(bnode)!=-1) - {xml=xml.substring(xml.indexOf(resEnd)+resEnd.length());continue;} + // System.out.println(getNextResult(xml)); + String nextResult = ""; + while ((nextResult = getNextResult(xml)) != null) { + // System.out.println(xml.indexOf(resEnd)); + // System.out.println(xml); + if (nextResult.indexOf(bnode) != -1) { + xml = xml.substring(xml.indexOf(resEnd) + resEnd.length()); + continue; + } // get pred - //predtmp = nextResult.substring(nextResult.indexOf(one) + one.length()); - predtmp=getinTag(nextResult, one,endbinding); - predtmp=getinTag(predtmp, uri,uri); - //System.out.println(predtmp); - + // predtmp = nextResult.substring(nextResult.indexOf(one) + + // one.length()); + predtmp = getinTag(nextResult, one, endbinding); + predtmp = getinTag(predtmp, uri, uri); + // System.out.println(predtmp); + // getobj - objtmp=getinTag(nextResult, two,endbinding); - objtmp=getinTag(objtmp, uri,uri); - //System.out.println(objtmp); - - StringTuple st=new StringTuple(predtmp, objtmp); - //System.out.println(st); + objtmp = getinTag(nextResult, two, endbinding); + objtmp = getinTag(objtmp, uri, uri); + // System.out.println(objtmp); + + StringTuple st = new StringTuple(predtmp, objtmp); + // System.out.println(st); ret.add(st); - xml=xml.substring(xml.indexOf(resEnd)+resEnd.length()); - + xml = xml.substring(xml.indexOf(resEnd) + resEnd.length()); + } - /*while (xml.indexOf(one) != -1) { - - + /* + * while (xml.indexOf(one) != -1) { + * + * + * + * // System.out.println(new Tupel(predtmp,objtmp)); } + */ - - // System.out.println(new Tupel(predtmp,objtmp)); - }*/ - return ret; } - - private String getNextResult(String xml){ - String res1="<result>"; - String res2="</result>"; - if(xml.indexOf(res1)==-1)return null; + + private String getNextResult(String xml) { + String res1 = "<result>"; + String res2 = "</result>"; + if (xml.indexOf(res1) == -1) + return null; xml = xml.substring(xml.indexOf(res1) + res1.length()); - xml = xml.substring(0,xml.indexOf(res2) ); - //System.out.println(xml); + xml = xml.substring(0, xml.indexOf(res2)); + // System.out.println(xml); return xml; } - private String getinTag(String xml, String starttag, String endtag){ - String res1="<"+starttag+">"; - //System.out.println(res1); - String res2="</"+endtag+">"; - if(xml.indexOf(res1)==-1)return null; + + private String getinTag(String xml, String starttag, String endtag) { + String res1 = "<" + starttag + ">"; + // System.out.println(res1); + String res2 = "</" + endtag + ">"; + if (xml.indexOf(res1) == -1) + return null; xml = xml.substring(xml.indexOf(res1) + res1.length()); - //System.out.println(xml); - xml = xml.substring(0,xml.indexOf(res2) ); - 
//System.out.println(xml); - + // System.out.println(xml); + xml = xml.substring(0, xml.indexOf(res2)); + // System.out.println(xml); + return xml; } - public String sendAndReceiveSPARQL(String queryString){ - //SparqlQuery sq=new SparqlQuery(configuration.getSparqlEndpoint()); - return sparqlQuery.getAsXMLString(queryString); + public String sendAndReceiveSPARQL(String queryString) { + // SparqlQuery sq=new SparqlQuery(configuration.getSparqlEndpoint()); + return new SparqlQuery(queryString, configuration.getSparqlEndpoint()) + .getAsXMLString(); } - - public void p(String str){ - if(print_flag){ + + public void p(String str) { + if (print_flag) { System.out.println(str); } } - - } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-18 23:21:56 UTC (rev 395) @@ -19,88 +19,76 @@ */ package org.dllearner.kb.sparql; -import java.io.BufferedReader; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.net.HttpURLConnection; import java.net.URI; -import java.net.URLEncoder; import java.util.HashSet; -import java.util.Iterator; +import java.util.LinkedHashSet; import java.util.Set; import org.dllearner.kb.sparql.configuration.Configuration; -import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.query.Cache; +import org.dllearner.kb.sparql.query.SparqlQuery; import org.dllearner.utilities.StringTuple; /** * Can execute different queries. * * @author Sebastian Hellmann - * + * */ -public class TypedSparqlQueryClasses implements TypedSparqlQueryInterface{ - boolean print_flag=false; - boolean debug_no_cache=false; +public class TypedSparqlQueryClasses implements TypedSparqlQueryInterface { + boolean print_flag = false; + boolean debug_no_cache = false; private Configuration configuration; // private SparqlHTTPRequest SparqlHTTPRequest; - //private SparqlQueryMaker sparqlQueryMaker; + // private SparqlQueryMaker sparqlQueryMaker; Cache cache; - public TypedSparqlQueryClasses(Configuration configuration) { this.configuration = configuration; this.cache = new Cache("cache"); } - + // standard query get a tupels (p,o) for subject s public Set<StringTuple> query(URI u) { // getQuery - String sparql = "SELECT ?predicate ?object " + - "WHERE {" + - "<"+u.toString()+"> ?predicate ?object;" + - "a ?object . " + - " FILTER (!regex(str(?object),'http://xmlns.com/foaf/0.1/'))"+ - "}"; - + String sparql = "SELECT ?predicate ?object " + "WHERE {" + "<" + + u.toString() + "> ?predicate ?object;" + "a ?object . 
" + + " FILTER (!regex(str(?object),'http://xmlns.com/foaf/0.1/'))" + + "}"; + return cachedSparql(u, sparql, "predicate", "object"); } - - - // uses a cache - private Set<StringTuple> cachedSparql(URI u, String sparql, String a, String b) { + // uses a cache + private Set<StringTuple> cachedSparql(URI u, String sparql, String a, + String b) { // check cache String FromCache = cache.get(u.toString(), sparql); - if(debug_no_cache) { - FromCache=null; - } + if (debug_no_cache) { + FromCache = null; + } String xml = null; // if not in cache get it from EndPoint if (FromCache == null) { configuration.increaseNumberOfuncachedSparqlQueries(); - try { - xml = sendAndReceiveSPARQL(sparql); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } + // try { + xml = sendAndReceiveSPARQL(sparql); + /* + * } catch (IOException e) {e.printStackTrace();} + */ p(sparql); // System.out.println(xml); - if(!debug_no_cache) { + if (!debug_no_cache) { cache.put(u.toString(), sparql, xml); } - //System.out.print("\n"); + // System.out.print("\n"); } else { configuration.increaseNumberOfCachedSparqlQueries(); xml = FromCache; - //System.out.println("FROM CACHE"); + // System.out.println("FROM CACHE"); } // System.out.println(sparql); @@ -108,7 +96,7 @@ // process XML Set<StringTuple> s = processResult(xml, a, b); try { - //System.out.println("retrieved " + s.size() + " tupels\n"); + // System.out.println("retrieved " + s.size() + " tupels\n"); } catch (Exception e) { } return s; @@ -118,136 +106,89 @@ Set<StringTuple> ret = new HashSet<StringTuple>(); // TODO if result is empty, catch exceptions - String resEnd="</result>"; + String resEnd = "</result>"; String one = "binding name=\"" + a + "\""; String two = "binding name=\"" + b + "\""; - String endbinding= "binding"; - String uri="uri"; - //String uridel = "<uri>"; + String endbinding = "binding"; + String uri = "uri"; + // String uridel = "<uri>"; String bnode = "<bnode>"; - //String uriend = "</uri>"; + // String uriend = "</uri>"; String predtmp = ""; String objtmp = ""; - //System.out.println(getNextResult(xml)); - String nextResult=""; - while ((nextResult=getNextResult( xml))!=null){ - //System.out.println(xml.indexOf(resEnd)); - //System.out.println(xml); - if(nextResult.indexOf(bnode)!=-1) - {xml=xml.substring(xml.indexOf(resEnd)+resEnd.length());continue;} + // System.out.println(getNextResult(xml)); + String nextResult = ""; + while ((nextResult = getNextResult(xml)) != null) { + // System.out.println(xml.indexOf(resEnd)); + // System.out.println(xml); + if (nextResult.indexOf(bnode) != -1) { + xml = xml.substring(xml.indexOf(resEnd) + resEnd.length()); + continue; + } // get pred - //predtmp = nextResult.substring(nextResult.indexOf(one) + one.length()); - predtmp=getinTag(nextResult, one,endbinding); - predtmp=getinTag(predtmp, uri,uri); - //System.out.println(predtmp); - + // predtmp = nextResult.substring(nextResult.indexOf(one) + + // one.length()); + predtmp = getinTag(nextResult, one, endbinding); + predtmp = getinTag(predtmp, uri, uri); + // System.out.println(predtmp); + // getobj - objtmp=getinTag(nextResult, two,endbinding); - objtmp=getinTag(objtmp, uri,uri); - //System.out.println(objtmp); - - StringTuple st=new StringTuple(predtmp, objtmp); - //System.out.println(st); + objtmp = getinTag(nextResult, two, endbinding); + objtmp = getinTag(objtmp, uri, uri); + // System.out.println(objtmp); + + StringTuple st = new StringTuple(predtmp, objtmp); + // System.out.println(st); ret.add(st); - 
xml=xml.substring(xml.indexOf(resEnd)+resEnd.length()); - + xml = xml.substring(xml.indexOf(resEnd) + resEnd.length()); + } - /*while (xml.indexOf(one) != -1) { - - + /* + * while (xml.indexOf(one) != -1) { + * + * + * + * // System.out.println(new Tupel(predtmp,objtmp)); } + */ - - // System.out.println(new Tupel(predtmp,objtmp)); - }*/ - return ret; } - - private String getNextResult(String xml){ - String res1="<result>"; - String res2="</result>"; - if(xml.indexOf(res1)==-1)return null; + + private String getNextResult(String xml) { + String res1 = "<result>"; + String res2 = "</result>"; + if (xml.indexOf(res1) == -1) + return null; xml = xml.substring(xml.indexOf(res1) + res1.length()); - xml = xml.substring(0,xml.indexOf(res2) ); - //System.out.println(xml); + xml = xml.substring(0, xml.indexOf(res2)); + // System.out.println(xml); return xml; } - private String getinTag(String xml, String starttag, String endtag){ - String res1="<"+starttag+">"; - //System.out.println(res1); - String res2="</"+endtag+">"; - if(xml.indexOf(res1)==-1)return null; + + private String getinTag(String xml, String starttag, String endtag) { + String res1 = "<" + starttag + ">"; + // System.out.println(res1); + String res2 = "</" + endtag + ">"; + if (xml.indexOf(res1) == -1) + return null; xml = xml.substring(xml.indexOf(res1) + res1.length()); - //System.out.println(xml); - xml = xml.substring(0,xml.indexOf(res2) ); - //System.out.println(xml); - + // System.out.println(xml); + xml = xml.substring(0, xml.indexOf(res2)); + // System.out.println(xml); + return xml; } - private String sendAndReceiveSPARQL(String sparql) throws IOException { - p("sendAndReceiveSPARQL"); - StringBuilder answer = new StringBuilder(); - //sparql="SELECT * WHERE {?a ?b ?c}LIMIT 10"; + private String sendAndReceiveSPARQL(String sparql) { + LinkedHashSet l = new LinkedHashSet<String>(); - // String an Sparql-Endpoint schicken - HttpURLConnection connection; - SparqlEndpoint se = configuration.getSparqlEndpoint(); - p("URL: "+se.getURL()); - p("Host: "+se.getHost()); - - connection = (HttpURLConnection) se.getURL().openConnection(); - connection.setDoOutput(true); - - //connection.addRequestProperty("Host", se.getHost()); - connection.addRequestProperty("Connection", "close"); - connection - .addRequestProperty( - "Accept", - "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); - connection.addRequestProperty("Accept-Language", "de-de,de;q=0.8,en-us;q=0.5,en;q=0.3"); - connection.addRequestProperty("Accept-Charset", "utf-8;q=1.0"); - connection - .addRequestProperty( - "User-Agent", - "Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8.1.4) Gecko/20070515 Firefox/2.0.0.4 Web-Sniffer/1.0.24"); - - OutputStream os = connection.getOutputStream(); - OutputStreamWriter osw = new OutputStreamWriter(os); - - Set<String> s = se.getParameters().keySet(); - Iterator<String> it = s.iterator(); - String FullURI = ""; - while (it.hasNext()) { - String element = it.next(); - FullURI += "" + URLEncoder.encode(element, "UTF-8") + "=" - + URLEncoder.encode(se.getParameters().get(element), "UTF-8") + "&"; - } - - FullURI += "" + se.getHasQueryParameter() + "=" + URLEncoder.encode(sparql, "UTF-8"); - p(FullURI); - osw.write(FullURI); - osw.close(); - - // receive answer - InputStream is = connection.getInputStream(); - InputStreamReader isr = new InputStreamReader(is, "UTF-8"); - BufferedReader br = new BufferedReader(isr); - - String line; - do { - line = br.readLine(); - if (line != null) - 
answer.append(line); - } while (line != null); - - br.close(); - p(answer.toString()); - return answer.toString(); + return new SparqlQuery(sparql, configuration.getSparqlEndpoint()) + .getAsXMLString(); } - public void p(String str){ - if(print_flag){ + + public void p(String str) { + if (print_flag) { System.out.println(str); } } Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java 2008-01-18 23:21:56 UTC (rev 395) @@ -1,168 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql.configuration; - -import java.net.URL; -import java.util.HashMap; - - -/** - * Holds some predefined endpoints. - * - * @author Sebastian Hellmann - * - */ -public class PredefinedEndpoint { - public static SparqlEndpoint getEndpoint(int i) { - - switch (i) { - case 1: - return dbpediaEndpoint(); - case 2: - return localJoseki(); - case 3: - return govTrack(); - case 4: - return revyu(); - case 5: - return myopenlink(); - case 6: - return worldFactBook(); - } - return null; - } - - public static SparqlEndpoint dbpediaEndpoint() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - m.put("default-graph-uri", "http://dbpedia.org"); - m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://dbpedia.openlinksw.com:8890/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SparqlEndpoint(u, "dbpedia.openlinksw.com", m); - } - - public static SparqlEndpoint localJoseki() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://localhost:2020/books"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SparqlEndpoint(u, "localhost", m); - } - public static SparqlEndpoint worldFactBook() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://www4.wiwiss.fu-berlin.de/factbook/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); - } - - /* - * it only has 4 classes - public static SpecificSparqlEndpoint dblp() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = 
new URL("http://www4.wiwiss.fu-berlin.de/dblp/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); - } - */ - public static SparqlEndpoint govTrack() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - try { - u = new URL("http://www.rdfabout.com/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SparqlEndpoint(u, "www.rdfabout.com", m); - } - public static SparqlEndpoint revyu() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - //http://revyu.com/sparql?query=SELECT DISTINCT * WHERE {[] a ?c} - try { - u = new URL("http://revyu.com/sparql"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SparqlEndpoint(u, "revyu.com", m); - } - - // returns strange xml - /*public static SpecificSparqlEndpoint dbtune() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - // m.put("default-graph-uri", "http://dbpedia.org"); - // m.put("format", "application/sparql-results.xml"); - //http://dbtune.org:2020/sparql/?query=SELECT DISTINCT * WHERE {[] a ?c}Limit 10 - http://dbtune.org:2020/evaluateQuery?repository=default&serialization=rdfxml&queryLanguage=SPARQL&query=SELECT+DISTINCT+*+WHERE+%7B%5B%5D+a+%3Fc%7D - &resultFormat=xml - &resourceFormat=ns&entailment=none - http://dbtune.org:2020/evaluateQuery - ?repository=default&serialization=rdfxml&queryLanguage=SPARQL - &query=SELECT+DISTINCT+*+WHERE+%7B%5B%5D+a+%3Fc%7D - &resultFormat=xml - &resourceFormat=ns&entailment=none - try { - u = new URL("http://dbtune.org:2020/sparql/"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SpecificSparqlEndpoint(u, "dbtune.org", m); - }*/ - - public static SparqlEndpoint myopenlink() { - URL u = null; - HashMap<String, String> m = new HashMap<String, String>(); - m.put("default-graph-uri", "http://myopenlink.net/dataspace"); - m.put("format", "application/sparql-results.xml"); - //http://myopenlink.net:8890/sparql/?query=select+distinct+%3FConcept+where+%7B%5B%5D+a+%3FConcept%7D - try { - u = new URL("http://myopenlink.net:8890/sparql/"); - } catch (Exception e) { - e.printStackTrace(); - } - return new SparqlEndpoint(u, "myopenlink.net", m); - } - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedFilter.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedFilter.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedFilter.java 2008-01-18 23:21:56 UTC (rev 395) @@ -1,171 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql.configuration; - -import java.util.HashSet; -import java.util.Set; - - -/** - * Predefined filters. - * - * @author Sebastian Hellmann - * - */ -public class PredefinedFilter { - - - public static SparqlQueryType getFilter(int i) { - - switch (i) { - case 1: - return YagoFilter(); - case 2: - return SKOS(); - case 3: - return YAGOSKOS(); - case 4: - return YagoSpecialHierarchy(); - } - return null; - } - - - public static SparqlQueryType YagoFilter(){ - Set<String> pred = new HashSet<String>(); - pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - pred.add("http://dbpedia.org/property/relatedInstance"); - - Set<String> obj = new HashSet<String>(); - //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - //obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://dbpedia.org/resource/Category:"); - obj.add("http://dbpedia.org/resource/Template"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - obj.add("http://www.w3.org/2004/02/skos/core"); - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - public static SparqlQueryType YagoSpecialHierarchy(){ - Set<String> pred = new HashSet<String>(); - pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - pred.add("http://dbpedia.org/property/relatedInstance"); - pred.add("http://dbpedia.org/property/monarch"); - - - Set<String> obj = new HashSet<String>(); - obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://dbpedia.org/resource/Template"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - obj.add("http://www.w3.org/2004/02/skos/core"); - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - - - public static SparqlQueryType SKOS(){ - Set<String> pred = new HashSet<String>(); - //pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - pred.add("http://www.w3.org/2004/02/skos/core#narrower"); - 
pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - - Set<String> obj = new HashSet<String>(); - //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - //obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - - - obj.add("http://dbpedia.org/class/yago"); - obj.add("http://dbpedia.org/resource/Template"); - - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - public static SparqlQueryType YAGOSKOS(){ - Set<String> pred = new HashSet<String>(); - //pred.add("http://www.w3.org/2004/02/skos/core"); - pred.add("http://www.w3.org/2002/07/owl#sameAs"); - pred.add("http://xmlns.com/foaf/0.1/"); - - pred.add("http://dbpedia.org/property/reference"); - pred.add("http://dbpedia.org/property/website"); - pred.add("http://dbpedia.org/property/wikipage"); - //pred.add("http://www.w3.org/2004/02/skos/core#narrower"); - pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); - - Set<String> obj = new HashSet<String>(); - //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); - //obj.add("http://dbpedia.org/resource/Category:Articles_"); - obj.add("http://xmlns.com/foaf/0.1/"); - obj.add("http://upload.wikimedia.org/wikipedia/commons"); - obj.add("http://upload.wikimedia.org/wikipedia"); - - obj.add("http://www.geonames.org"); - obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); - obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); - - - //obj.add("http://dbpedia.org/class/yago"); - obj.add("http://dbpedia.org/resource/Template"); - - - return new SparqlQueryType("forbid", obj, pred, "false"); - } - - - -} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java 2008-01-18 23:21:56 UTC (rev 395) @@ -20,55 +20,175 @@ package org.dllearner.kb.sparql.configuration; import java.net.URL; -import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; /** - * One sparql endpoint configuration. 
+ * One sparql endpoint configuration, + * made to comply with Jena * * @author Sebastian Hellmann * */ public class SparqlEndpoint { - - String host; - String hasQueryParameter; URL url; - public HashMap<String, String> parameters = new HashMap<String, String>(); + LinkedList<String> defaultGraphURIs; + LinkedList<String> namedGraphURIs; + //public HashMap<String, String> parameters = new HashMap<String, String>(); - public SparqlEndpoint(URL url, String host, HashMap<String, String> parameters) { - super(); - this.host = host; - this.url = url; - this.hasQueryParameter = "query"; - this.parameters = parameters; + public SparqlEndpoint(URL u) { + this.url = u; + this.defaultGraphURIs=new LinkedList<String>(); + this.namedGraphURIs=new LinkedList<String>(); } + + public SparqlEndpoint(URL u,List<String> defaultGraphURIs,List<String> namedGraphURIs) { + this.url = u; + this.defaultGraphURIs=new LinkedList<String>(); + this.namedGraphURIs=new LinkedList<String>(); + } + - public String getHasQueryParameter() { - return hasQueryParameter; + public URL getURL() { + return this.url; } - public void setHasQueryParameter(String hasQueryParameter) { - this.hasQueryParameter = hasQueryParameter; + public LinkedList<String> getDefaultGraphURIs() { + return defaultGraphURIs; } - public String getHost() { - return host; + public LinkedList<String> getNamedGraphURIs() { + return namedGraphURIs; } + + public static SparqlEndpoint getEndpointByNumber(int i) { - public void setHost(String host) { - this.host = host; + switch (i) { + case 0: + return dbpediaEndpoint(); + case 1: + return localJoseki(); + case 2: + return govTrack(); + case 3: + return revyu(); + case 4: + return myopenlink(); + case 5: + return worldFactBook(); + } + return null; } + + public static SparqlEndpoint dbpediaEndpoint() { + URL u = null; + try { + u = new URL("http://dbpedia.openlinksw.com:8890/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + LinkedList<String> defaultGraphURIs=new LinkedList<String>(); + defaultGraphURIs.add("http://dbpedia.org"); + return new SparqlEndpoint(u, defaultGraphURIs, new LinkedList<String>()); + } - public HashMap<String, String> getParameters() { - return parameters; + public static SparqlEndpoint localJoseki() { + URL u = null; + try { + u = new URL("http://localhost:2020/books"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SparqlEndpoint(u, new LinkedList<String>(), new LinkedList<String>()); } + + public static SparqlEndpoint worldFactBook() { + URL u = null; + try { + u = new URL("http://www4.wiwiss.fu-berlin.de/factbook/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SparqlEndpoint(u, new LinkedList<String>(), new LinkedList<String>()); + } + - public void setParameters(HashMap<String, String> parameters) { - this.parameters = parameters; + public static SparqlEndpoint govTrack() { + URL u = null; + try { + u = new URL("http://www.rdfabout.com/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SparqlEndpoint(u, new LinkedList<String>(), new LinkedList<String>()); } + + public static SparqlEndpoint revyu() { + URL u = null; + try { + u = new URL("http://revyu.com/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SparqlEndpoint(u, new LinkedList<String>(), new LinkedList<String>()); + } + + public static SparqlEndpoint myopenlink() { + URL u = null; + try { + u = new URL("http://myopenlink.net:8890/sparql/"); + } catch (Exception e) { + e.printStackTrace(); + } + 
LinkedList<String> defaultGraphURIs=new LinkedList<String>(); + defaultGraphURIs.add("http://myopenlink.net/dataspace"); + return new SparqlEndpoint(u, defaultGraphURIs, new LinkedList<String>()); - public URL getURL() { - return this.url; + } + + + // returns strange xml + /*public static SpecificSparqlEndpoint dbtune() { + URL u = null; + HashMap<String, String> m = new HashMap<String, String>(); + // m.put("default-graph-uri", "http://dbpedia.org"); + // m.put("format", "application/sparql-results.xml"); + //http://dbtune.org:2020/sparql/?query=SELECT DISTINCT * WHERE {[] a ?c}Limit 10 + http://dbtune.org:2020/evaluateQuery?repository=default&serialization=rdfxml&queryLanguage=SPARQL&query=SELECT+DISTINCT+*+WHERE+%7B%5B%5D+a+%3Fc%7D + &resultFormat=xml + &resourceFormat=ns&entailment=none + http://dbtune.org:2020/evaluateQuery + ?repository=default&serialization=rdfxml&queryLanguage=SPARQL + &query=SELECT+DISTINCT+*+WHERE+%7B%5B%5D+a+%3Fc%7D + &resultFormat=xml + &resourceFormat=ns&entailment=none + try { + u = new URL("http://dbtune.org:2020/sparql/"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SpecificSparqlEndpoint(u, "dbtune.org", m); + }*/ + + + + + /* + * it only has 4 classes + public static SpecificSparqlEndpoint dblp() { + URL u = null; + HashMap<String, String> m = new HashMap<String, String>(); + // m.put("default-graph-uri", "http://dbpedia.org"); + // m.put("format", "application/sparql-results.xml"); + try { + u = new URL("http://www4.wiwiss.fu-berlin.de/dblp/sparql"); + } catch (Exception e) { + e.printStackTrace(); + } + return new SpecificSparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); } + */ + + } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java 2008-01-18 18:49:49 UTC (rev 394) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java 2008-01-18 23:21:56 UTC (rev 395) @@ -19,6 +19,7 @@ */ package org.dllearner.kb.sparql.configuration; +import java.util.HashSet; import java.util.Set; /** @@ -97,5 +98,142 @@ //System.out.println("added filter: "+filter); } + + public static SparqlQueryType getFilter(int i) { + switch (i) { + case 1: + return YagoFilter(); + case 2: + return SKOS(); + case 3: + return YAGOSKOS(); + case 4: + return YagoSpecialHierarchy(); + } + return null; + } + + + public static SparqlQueryType YagoFilter(){ + Set<String> pred = new HashSet<String>(); + pred.add("http://www.w3.org/2004/02/skos/core"); + pred.add("http://www.w3.org/2002/07/owl#sameAs"); + pred.add("http://xmlns.com/foaf/0.1/"); + + pred.add("http://dbpedia.org/property/reference"); + pred.add("http://dbpedia.org/property/website"); + pred.add("http://dbpedia.org/property/wikipage"); + pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); + pred.add("http://dbpedia.org/property/relatedInstance"); + + Set<String> obj = new HashSet<String>(); + //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); + //obj.add("http://dbpedia.org/resource/Category:Articles_"); + obj.add("http://dbpedia.org/resource/Category:"); + obj.add("http://dbpedia.org/resource/Template"); + obj.add("http://xmlns.com/foaf/0.1/"); + obj.add("http://upload.wikimedia.org/wikipedia/commons"); + obj.add("http://upload.wikimedia.org/wikipedia"); + obj.add("http://www.geonames.org"); + obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); + 
obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); + obj.add("http://www.w3.org/2004/02/skos/core"); + + return new SparqlQueryType("forbid", obj, pred, "false"); + } + public static SparqlQueryType YagoSpecialHierarchy(){ + Set<String> pred = new HashSet<String>(); + pred.add("http://www.w3.org/2004/02/skos/core"); + pred.add("http://www.w3.org/2002/07/owl#sameAs"); + pred.add("http://xmlns.com/foaf/0.1/"); + + pred.add("http://dbpedia.org/property/reference"); + pred.add("http://dbpedia.org/property/website"); + pred.add("http://dbpedia.org/property/wikipage"); + pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); + pred.add("http://dbpedia.org/property/relatedInstance"); + pred.add("http://dbpedia.org/property/monarch"); + + + Set<String> obj = new HashSet<String>(); + obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); + obj.add("http://dbpedia.org/resource/Category:Articles_"); + obj.add("http://dbpedia.org/resource/Template"); + obj.add("http://xmlns.com/foaf/0.1/"); + obj.add("http://up... [truncated message content] |
From: <ku...@us...> - 2008-01-19 01:26:41
|
Revision: 396 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=396&view=rev Author: kurzum Date: 2008-01-18 17:26:40 -0800 (Fri, 18 Jan 2008) Log Message: ----------- more changes Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/PropertyNode.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/SparqlQueryConventional.java trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-19 01:26:40 UTC (rev 396) @@ -19,7 +19,6 @@ */ package org.dllearner.kb.sparql; -import java.io.IOException; import java.net.URI; import java.util.HashSet; import java.util.Set; @@ -27,10 +26,9 @@ import java.util.TreeSet; import org.dllearner.kb.sparql.configuration.Configuration; -import org.dllearner.kb.sparql.configuration.SparqlQueryType; import org.dllearner.kb.sparql.configuration.SparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlQueryType; import org.dllearner.kb.sparql.datastructure.Node; -import org.dllearner.kb.sparql.old.oldSparqlOntologyCollector; import org.dllearner.utilities.StringTuple; /** Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2008-01-19 01:26:40 UTC (rev 396) @@ -36,23 +36,21 @@ * */ public class Manipulator { - public String subclass = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; - public String type = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; + public final String subclass = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; + public final String type = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; + final String objectProperty = "http://www.w3.org/2002/07/owl#ObjectProperty"; + final String classns = "http://www.w3.org/2002/07/owl#Class"; + final String thing = "http://www.w3.org/2002/07/owl#Thing"; + + public String blankNodeIdentifier = "bnode"; 
public int breakSuperClassRetrievalAfter=200; public LinkedList<StringTuple> replacePredicate; public LinkedList<StringTuple> replaceObject; - String objectProperty = "http://www.w3.org/2002/07/owl#ObjectProperty"; - String classns = "http://www.w3.org/2002/07/owl#Class"; - String thing = "http://www.w3.org/2002/07/owl#Thing"; - + Set<String> classproperties; - String[] defaultClasses = { "http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Category:", "http://dbpedia.org/resource/Template:", - "http://www.w3.org/2004/02/skos/core", "http://dbpedia.org/class/" }; - public Manipulator(String blankNodeIdentifier,int breakSuperClassRetrievalAfter,LinkedList<StringTuple> replacePredicate,LinkedList<StringTuple> replaceObject) { this.blankNodeIdentifier = blankNodeIdentifier; this.replaceObject=replaceObject; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-19 01:26:40 UTC (rev 396) @@ -21,15 +21,14 @@ import java.io.File; import java.io.FileWriter; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.Set; +import java.util.Vector; import org.dllearner.core.KnowledgeSource; import org.dllearner.core.OntologyFormat; @@ -45,7 +44,6 @@ import org.dllearner.core.dl.KB; import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.configuration.SparqlQueryType; -import org.dllearner.kb.sparql.old.oldSparqlOntologyCollector; import org.dllearner.kb.sparql.query.SparqlQuery; import org.dllearner.parser.KBParser; import org.dllearner.reasoning.DIGConverter; @@ -85,7 +83,7 @@ private String role = ""; private String blankNodeIdentifier = "bnode"; - LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); + //LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); @@ -276,14 +274,15 @@ // numberOfRecursions, filterMode, // Datastructures.setToArray(predList),Datastructures.setToArray( // objList),Datastructures.setToArray(classList),format,url,useLits); + Manager m = new Manager(); SparqlQueryType sqt = null; // get Options for Manipulator Manipulator man = new Manipulator(blankNodeIdentifier, breakSuperClassRetrievalAfter, replacePredicate, replaceObject); - HashMap<String, String> parameters = new HashMap<String, String>(); - parameters.put("default-graph-uri", "http://dbpedia.org"); - parameters.put("format", "application/sparql-results.xml"); + //HashMap<String, String> parameters = new HashMap<String, String>(); + //parameters.put("default-graph-uri", "http://dbpedia.org"); + //parameters.put("format", "application/sparql-results.xml"); // get Options for endpoints if (predefinedEndpoint >= 1) { @@ -296,10 +295,10 @@ // get Options for Filters if (predefinedFilter >= 1) { - sqt = SparqlQueryType.getFilter(predefinedFilter); + sqt = SparqlQueryType.getFilterByNumber(predefinedFilter); } else { - sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); + sqt = new SparqlQueryType("forbid", objList, 
predList, useLits ); } // give everything to the manager @@ -355,8 +354,7 @@ * * * Object[] arr=instances.toArray(); - * while(instances.size()>=30){ - * } + * while(instances.size()>=30){ } */ // add the role to the filter(a solution is always EXISTS // role.TOP) @@ -443,20 +441,23 @@ } /** - * TODO SparqlOntologyCollector needs to be removed * * @param label * @param limit */ public void calculateSubjects(String label, int limit) { System.out.println("SparqlModul: Collecting Subjects"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - subjects = oc.getSubjectsFromLabel(label, limit); - } catch (IOException e) { - subjects = new String[1]; - subjects[0] = "[Error]Sparql Endpoint could not be reached."; - } + // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + // try { + Vector<String> v = (SparqlQuery.makeLabelQuery(label, limit, sse) + .getAsVector("subject")); + subjects = (String[]) v.toArray(new String[v.size()]); + // subjects = oc.getSubjectsFromLabel(label, limit); + // } catch (IOException e) { + // TODO I removed IOException, please check + // subjects = new String[1]; + // subjects[0] = "[Error]Sparql Endpoint could not be reached."; + // } System.out.println("SparqlModul: ****Finished"); } @@ -467,30 +468,44 @@ */ public void calculateTriples(String subject) { System.out.println("SparqlModul: Collecting Triples"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - triples = oc.collectTriples(subject); - } catch (IOException e) { - triples = new String[1]; - triples[0] = "[Error]Sparql Endpoint could not be reached."; + Vector<StringTuple> v = (SparqlQuery.makeArticleQuery(subject, sse) + .getAsVectorOfTupels("predicate", "objcet")); + //String[] subjects = (String[]) v.toArray(new String[v.size()]); + String[] tmp = new String[v.size()]; + int i=0; + for (StringTuple stringTuple : v) { + tmp[i++]=stringTuple.a+"<"+stringTuple.b; } + triples=tmp; + //oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + //try { + // triples = oc.collectTriples(subject); + //} catch (IOException e) { + // triples = new String[1]; + // triples[0] = "[Error]Sparql Endpoint could not be reached."; + //} System.out.println("SparqlModul: ****Finished"); } /** - * TODO SparqlOntologyCollector needs to be removed * + * * @param concept */ public void calculateConceptSubjects(String concept) { System.out.println("SparqlModul: Collecting Subjects"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - conceptSubjects = oc.getSubjectsFromConcept(concept); - } catch (IOException e) { - conceptSubjects = new String[1]; - conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; - } + Vector<String> v = (SparqlQuery.makeConceptQuery(concept, sse) + .getAsVector("subject")); + conceptSubjects = (String[]) v.toArray(new String[v.size()]); + + // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + // try { + // conceptSubjects = oc.getSubjectsFromConcept(concept); + // } catch (IOException e) { + // TODO I removed IOException, please check + // conceptSubjects = new String[1]; + // conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; + // } System.out.println("SparqlModul: ****Finished"); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-18 23:21:56 UTC (rev 395) +++ 
trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-19 01:26:40 UTC (rev 396) @@ -22,22 +22,16 @@ import org.dllearner.kb.sparql.configuration.SparqlQueryType; import org.dllearner.kb.sparql.old.oldSparqlFilter; - /** - * Can assemble sparql queries. + * Can assemble sparql queries. can make queries for subject, predicate, object + * according to the filter settings object not yet implemented * * @author Sebastian Hellmann - * + * */ public class SparqlQueryMaker { - String lineend="\n"; - boolean print_flag=false; - /* can make queries for subject, predicate, object - * according to the filter settings - * object not yet implemented - * - * */ - + String lineend = "\n"; + boolean print_flag = false; private SparqlQueryType sparqlQueryType; public SparqlQueryMaker(SparqlQueryType SparqlQueryType) { @@ -45,49 +39,51 @@ } public String makeSubjectQueryUsingFilters(String subject) { - + String Filter = internalFilterAssemblySubject(); - String ret = "SELECT * WHERE { " + lineend + "<" + subject + "> ?predicate ?object. " - + lineend + "FILTER( " + lineend + "(" + Filter + ").}"; + String ret = "SELECT * WHERE { " + lineend + "<" + subject + + "> ?predicate ?object. " + lineend + "FILTER( " + lineend + + "(" + Filter + ").}"; // System.out.println(ret); - //System.out.println(sparqlQueryType.getPredicatefilterlist().length); + // System.out.println(sparqlQueryType.getPredicatefilterlist().length); return ret; } public String makeRoleQueryUsingFilters(String role) { - + String Filter = internalFilterAssemblyRole(); - String ret = "SELECT * WHERE { " + lineend + " ?subject <" + role + "> ?object. " + lineend - + "FILTER( " + lineend + "(" + Filter + ").}"; + String ret = "SELECT * WHERE { " + lineend + " ?subject <" + role + + "> ?object. " + lineend + "FILTER( " + lineend + "(" + Filter + + ").}"; // System.out.println(ret); return ret; } - public String makeRoleQueryUsingFilters(String role,boolean domain) { - + + public String makeRoleQueryUsingFilters(String role, boolean domain) { + String Filter = internalFilterAssemblyRole(); - String ret=""; - if(domain){ - ret = "SELECT * WHERE { " + lineend + - "?subject <" + role + "> ?object; a []. " + lineend - + "FILTER( " + lineend + "(" + Filter + ").}" ; - //"ORDER BY ?subject"; - // System.out.println(ret); - }else{ - ret = "SELECT * WHERE { " + lineend + - "?object a [] . " + - "?subject <" + role + "> ?object . " + lineend - + "FILTER( " + lineend + "(" + Filter + ").}"; - //"ORDER BY ?object"; - + String ret = ""; + if (domain) { + ret = "SELECT * WHERE { " + lineend + "?subject <" + role + + "> ?object; a []. " + lineend + "FILTER( " + lineend + + "(" + Filter + ").}"; + // "ORDER BY ?subject"; + // System.out.println(ret); + } else { + ret = "SELECT * WHERE { " + lineend + "?object a [] . " + + "?subject <" + role + "> ?object . 
" + lineend + + "FILTER( " + lineend + "(" + Filter + ").}"; + // "ORDER BY ?object"; + } - //System.out.println(ret); + // System.out.println(ret); return ret; } private String internalFilterAssemblySubject() { - + String Filter = ""; if (!this.sparqlQueryType.isLiterals()) Filter += "!isLiteral(?object))"; @@ -101,7 +97,7 @@ } private String internalFilterAssemblyRole() { - + String Filter = ""; if (!this.sparqlQueryType.isLiterals()) Filter += "!isLiteral(?object))"; @@ -125,69 +121,65 @@ public static String filterObject(String ns) { return "&&( !regex(str(?object), '" + ns + "') )"; } - - public void p(String str){ - if(print_flag){ + + public void p(String str) { + if (print_flag) { System.out.println(str); } } - + /** - * creates a query with the specified filters for alls triples with subject - * @param subject the searched subject - * @param sf special object encapsulating all options + * creates a query with the specified filters for all triples with subject + * + * @param subject + * the searched subject + * @param sf + * special object encapsulating all options * @return sparql query */ - public static String makeQueryFilter(String subject, oldSparqlFilter sf){ - - - String Filter=""; - if(!sf.useLiterals)Filter+="!isLiteral(?object)"; - for (String p : sf.getPredFilter()) { - Filter+="\n" + filterPredicate(p); + public static String makeQueryFilter(String subject, oldSparqlFilter sf) { + + String Filter = ""; + if (!sf.useLiterals) + Filter += "!isLiteral(?object)"; + for (String p : sf.getPredFilter()) { + Filter += "\n" + filterPredicate(p); } - for (String o : sf.getObjFilter()) { - Filter+="\n" + filterObject(o); + for (String o : sf.getObjFilter()) { + Filter += "\n" + filterObject(o); } - - String ret= - "SELECT * WHERE { \n" + - "<"+ - subject+ - "> ?predicate ?object.\n"; - if (!(Filter.length()==0)) - ret+="FILTER( \n" + - "(" +Filter+"))."; - ret+="}"; - //System.out.println(ret); + + String ret = "SELECT * WHERE { \n" + "<" + subject + + "> ?predicate ?object.\n"; + if (!(Filter.length() == 0)) + ret += "FILTER( \n" + "(" + Filter + "))."; + ret += "}"; + // System.out.println(ret); return ret; - } - - /** - * creates a query for subjects with the specified label - * @param label a phrase that is part of the label of a subject - * @param limit this limits the amount of results - * @return - */ - public static String makeLabelQuery(String label,int limit){ - //TODO maybe use http://xmlns:com/foaf/0.1/page - return "SELECT DISTINCT ?subject\n"+ - "WHERE { ?subject <http://www.w3.org/2000/01/rdf-schema#label> ?object.?object bif:contains '\""+label+"\"'@en}\n"+ - "LIMIT "+limit; } - - /** - * creates a query for all subjects that are of the type concept - * @param concept the type that subjects are searched for - * @return + + /* + * moved to SparqlQuery TODO remove here creates a query for subjects with + * the specified label @param label a phrase that is part of the label of a + * subject @param limit this limits the amount of results @return + * + * @Deprecated public static String makeLabelQuery(String label,int limit){ + * //TODO maybe use http://xmlns:com/foaf/0.1/page return + * "SELECT DISTINCT ?subject\n"+ "WHERE { ?subject + * <http://www.w3.org/2000/01/rdf-schema#label> ?object.?object + * bif:contains '\""+label+"\"'@en}\n"+ "LIMIT "+limit; } + * + * + * creates a query for all subjects that are of the type concept @param + * concept the type that subjects are searched for @return + * + * + * moved to SparqlQuery TODO remove here + * @Deprecated public 
static String makeConceptQuery(String concept){ return + * "SELECT DISTINCT ?subject\n"+ "WHERE { ?subject a + * <"+concept+">}\n"; } moved to SparqlQuery TODO remove here + * @Deprecated public static String makeArticleQuery(String subject){ return + * "SELECT ?predicate,?object\n"+ "WHERE { <"+subject+"> + * ?predicate ?object}\n"; } */ - public static String makeConceptQuery(String concept){ - return "SELECT DISTINCT ?subject\n"+ - "WHERE { ?subject a <"+concept+">}\n"; - } - - public static String makeArticleQuery(String subject){ - return "SELECT ?predicate,?object\n"+ - "WHERE { <"+subject+"> ?predicate ?object}\n"; - } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-19 01:26:40 UTC (rev 396) @@ -19,23 +19,12 @@ */ package org.dllearner.kb.sparql; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.net.HttpURLConnection; import java.net.URI; -import java.net.URLEncoder; import java.util.HashSet; -import java.util.Iterator; import java.util.Set; import org.dllearner.kb.sparql.configuration.Configuration; -import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.query.Cache; -import org.dllearner.kb.sparql.query.CachedSparqlQuery; import org.dllearner.kb.sparql.query.SparqlQuery; import org.dllearner.utilities.StringTuple; @@ -52,7 +41,7 @@ // private SparqlHTTPRequest SparqlHTTPRequest; private SparqlQueryMaker sparqlQueryMaker; // private SparqlQuery sparqlQuery; - private CachedSparqlQuery cachedSparqlQuery; + //private CachedSparqlQuery cachedSparqlQuery; Cache cache; public TypedSparqlQuery(Configuration Configuration) { Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-19 01:26:40 UTC (rev 396) @@ -19,10 +19,8 @@ */ package org.dllearner.kb.sparql; -import java.io.IOException; import java.net.URI; import java.util.HashSet; -import java.util.LinkedHashSet; import java.util.Set; import org.dllearner.kb.sparql.configuration.Configuration; @@ -181,7 +179,7 @@ } private String sendAndReceiveSPARQL(String sparql) { - LinkedHashSet l = new LinkedHashSet<String>(); + return new SparqlQuery(sparql, configuration.getSparqlEndpoint()) .getAsXMLString(); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java 2008-01-19 01:26:40 UTC (rev 396) @@ -33,7 +33,7 @@ * see the other classes, which are used as attributes here * */ - private SparqlEndpoint specificSparqlEndpoint; + private SparqlEndpoint endpoint; private SparqlQueryType sparqlQueryType; private Manipulator manipulator; // the following needs to be moved to @@ -47,7 +47,7 @@ public 
Configuration(SparqlEndpoint specificSparqlEndpoint, SparqlQueryType sparqlQueryType, Manipulator manipulator, int recursiondepth, boolean getAllSuperClasses, boolean closeAfterRecursion) { - this.specificSparqlEndpoint = specificSparqlEndpoint; + this.endpoint = specificSparqlEndpoint; this.sparqlQueryType = sparqlQueryType; this.manipulator = manipulator; this.recursiondepth = recursiondepth; @@ -58,7 +58,7 @@ public Configuration changeQueryType(SparqlQueryType sqt) { // TODO must clone here - return new Configuration(this.specificSparqlEndpoint, sqt, this.manipulator, + return new Configuration(this.endpoint, sqt, this.manipulator, this.recursiondepth, this.getAllSuperClasses,this.closeAfterRecursion); } @@ -68,7 +68,7 @@ } public SparqlEndpoint getSparqlEndpoint() { - return specificSparqlEndpoint; + return endpoint; } public SparqlQueryType getSparqlQueryType() { Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java 2008-01-19 01:26:40 UTC (rev 396) @@ -64,17 +64,19 @@ public static SparqlEndpoint getEndpointByNumber(int i) { switch (i) { - case 0: + case 0:break; + //should not be filled + case 1: return dbpediaEndpoint(); - case 1: + case 2: return localJoseki(); - case 2: + case 3: return govTrack(); - case 3: + case 4: return revyu(); - case 4: + case 5: return myopenlink(); - case 5: + case 6: return worldFactBook(); } return null; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java 2008-01-19 01:26:40 UTC (rev 396) @@ -29,20 +29,13 @@ * */ public class SparqlQueryType { - // TODO make sets out of them - private String mode = "forbid"; - private String[] objectfilterlist = { "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/resource/Category", "http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons" }; - private String[] predicatefilterlist = { "http://www.w3.org/2004/02/skos/core", - "http://xmlns.com/foaf/0.1/", "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", "http://dbpedia.org/property/reference" }; + private Set<String> objectfilterlist; + private Set<String> predicatefilterlist; private boolean literals = false; - public SparqlQueryType(String mode, String[] obectfilterlist, String[] predicatefilterlist, + public SparqlQueryType(String mode, Set<String> obectfilterlist, Set<String> predicatefilterlist, boolean literals) { super(); this.mode = mode; @@ -51,25 +44,6 @@ this.literals = literals; } - public SparqlQueryType(String mode, Set<String> objectfilterlist, - Set<String> predicatefilterlist, String literals) { - super(); - this.mode = mode; - this.literals = (literals.equals("true")) ? 
true : false; - - Object[] arr = objectfilterlist.toArray(); - Object[] arr2 = predicatefilterlist.toArray(); - this.objectfilterlist = new String[arr.length]; - this.predicatefilterlist = new String[arr2.length]; - for (int i = 0; i < arr.length; i++) { - this.objectfilterlist[i] = (String) arr[i]; - } - for (int i = 0; i < arr2.length; i++) { - this.predicatefilterlist[i] = (String) arr2[i]; - } - - } - public boolean isLiterals() { return literals; } @@ -78,30 +52,24 @@ return mode; } - public String[] getObjectfilterlist() { + public Set<String> getObjectfilterlist() { return objectfilterlist; } - public String[] getPredicatefilterlist() { + public Set<String> getPredicatefilterlist() { return predicatefilterlist; } public void addPredicateFilter(String filter) { - String[] tmp = new String[predicatefilterlist.length + 1]; - int i = 0; - for (; i < predicatefilterlist.length; i++) { - tmp[i] = predicatefilterlist[i]; - //System.out.println(tmp[i]); - } - tmp[i] = filter; - predicatefilterlist=tmp; + predicatefilterlist.add(filter); //System.out.println("added filter: "+filter); - } - public static SparqlQueryType getFilter(int i) { + public static SparqlQueryType getFilterByNumber(int i) { switch (i) { + case 0:break; + //should not be filled case 1: return YagoFilter(); case 2: @@ -126,7 +94,7 @@ pred.add("http://dbpedia.org/property/wikipage"); pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); pred.add("http://dbpedia.org/property/relatedInstance"); - + Set<String> obj = new HashSet<String>(); //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); //obj.add("http://dbpedia.org/resource/Category:Articles_"); @@ -140,7 +108,7 @@ obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); obj.add("http://www.w3.org/2004/02/skos/core"); - return new SparqlQueryType("forbid", obj, pred, "false"); + return new SparqlQueryType("forbid", obj, pred, false); } public static SparqlQueryType YagoSpecialHierarchy(){ Set<String> pred = new HashSet<String>(); @@ -168,7 +136,7 @@ obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); obj.add("http://www.w3.org/2004/02/skos/core"); - return new SparqlQueryType("forbid", obj, pred, "false"); + return new SparqlQueryType("forbid", obj, pred, false); } @@ -200,7 +168,7 @@ obj.add("http://dbpedia.org/resource/Template"); - return new SparqlQueryType("forbid", obj, pred, "false"); + return new SparqlQueryType("forbid", obj, pred, false); } public static SparqlQueryType YAGOSKOS(){ Set<String> pred = new HashSet<String>(); @@ -230,7 +198,7 @@ obj.add("http://dbpedia.org/resource/Template"); - return new SparqlQueryType("forbid", obj, pred, "false"); + return new SparqlQueryType("forbid", obj, pred, false); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/ClassNode.java 2008-01-19 01:26:40 UTC (rev 396) @@ -39,18 +39,17 @@ public ClassNode(URI u) { super(u); - this.type = "class"; + // this.type = "class"; } - //expands all directly connected nodes + // expands all directly connected nodes @Override public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { + Set<StringTuple> s = tsq.query(this.uri); // see manipulator s = m.check(s, this); Vector<Node> Nodes = new Vector<Node>(); - - Iterator<StringTuple> it = s.iterator(); while (it.hasNext()) { 
StringTuple t = (StringTuple) it.next(); @@ -58,7 +57,8 @@ // substitute rdf:type with owl:subclassof if (t.a.equals(m.type) || t.a.equals(m.subclass)) { ClassNode tmp = new ClassNode(new URI(t.b)); - properties.add(new PropertyNode(new URI(m.subclass), this, tmp)); + properties.add(new PropertyNode(new URI(m.subclass), this, + tmp)); Nodes.add(tmp); } else { // further expansion stops here @@ -67,7 +67,7 @@ properties.add(new PropertyNode(new URI(t.a), this, tmp)); // System.out.println(m.blankNodeIdentifier); // System.out.println("XXXXX"+t.b); - + // if o is a blank node expand further if (t.b.startsWith(m.blankNodeIdentifier)) { tmp.expand(tsq, m); @@ -87,29 +87,26 @@ // gets the types for properties recursively @Override - public Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { - // TODO return type doesn't make sense - return new Vector<Node>(); + public void expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { } @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); - s.add("<" + this.uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + "http://www.w3.org/2002/07/owl#Class" + ">."); + s.add("<" + this.uri + "><" + rdftype + "><" + classns + ">."); for (PropertyNode one : properties) { - s.add("<" + this.uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); + s.add("<" + this.uri + "><" + one.getURI() + "><" + + one.getB().getURI() + ">."); s.addAll(one.getB().toNTriple()); } return s; } - + @Override - public int compareTo(Node n){ + public int compareTo(Node n) { return super.compareTo(n); - // } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/InstanceNode.java 2008-01-19 01:26:40 UTC (rev 396) @@ -33,7 +33,7 @@ * A node in the graph that is an instance. 
* * @author Sebastian Hellmann - * + * */ public class InstanceNode extends Node { @@ -43,11 +43,11 @@ public InstanceNode(URI u) { super(u); - this.type = "instance"; + // this.type = "instance"; } - //expands all directly connected nodes + // expands all directly connected nodes @Override public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { @@ -85,34 +85,33 @@ // gets the types for properties recursively @Override - public Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { + public void expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { for (PropertyNode one : properties) { one.expandProperties(tsq, m); } - return new Vector<Node>(); + } @Override public Set<String> toNTriple() { Set<String> s = new HashSet<String>(); - s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + "http://www.w3.org/2002/07/owl#Thing" + ">."); + s.add("<" + uri + "><" + rdftype + "><" + thing + ">."); for (ClassNode one : classes) { - s.add("<" + uri + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" - + one.getURI() + ">."); + s.add("<" + uri + "><" + rdftype + "><" + one.getURI() + ">."); s.addAll(one.toNTriple()); } for (PropertyNode one : properties) { - s.add("<" + uri + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); + s.add("<" + uri + "><" + one.getURI() + "><" + one.getB().getURI() + + ">."); s.addAll(one.toNTriple()); s.addAll(one.getB().toNTriple()); } return s; } - + @Override - public int compareTo(Node n){ + public int compareTo(Node n) { return super.compareTo(n); // } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/Node.java 2008-01-19 01:26:40 UTC (rev 396) @@ -30,37 +30,73 @@ * Abstract class. 
* * @author Sebastian Hellmann + * + */ +/** + * @author sebastian * */ public abstract class Node implements Comparable<Node> { + + final String subclass = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; + final String rdftype = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; + final String objectProperty = "http://www.w3.org/2002/07/owl#ObjectProperty"; + final String classns = "http://www.w3.org/2002/07/owl#Class"; + final String thing = "http://www.w3.org/2002/07/owl#Thing"; + URI uri; - protected String type; + //protected String type; protected boolean expanded = false; public Node(URI u) { this.uri = u; } - public abstract Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m); + /** + * Nodes are expanded with a certain context, given by + * the typedSparqlQuery and the manipulator + * @param typedSparqlQuery + * @param manipulator + * @return Vector<Node> all Nodes that are new because of expansion + */ + public abstract Vector<Node> expand(TypedSparqlQueryInterface typedSparqlQuery, + Manipulator manipulator); - public abstract Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m); + + /** + * used to get type defs for properties like rdf:type SymmetricProperties + * + * @param typedSparqlQuery + * @param manipulator + * @return Vector<Node> + */ + public abstract void expandProperties( + TypedSparqlQueryInterface typedSparqlQuery, Manipulator manipulator); + /** + * output + * @return a set of n-triple + */ public abstract Set<String> toNTriple(); @Override public String toString() { - return "Node: " + uri + ":" + type; + return "Node: " + uri + ":" + this.getClass(); } public URI getURI() { return uri; } - public boolean equals(Node n){ - if(this.uri.equals(n.uri))return true; - else return false; + + public boolean equals(Node n) { + if (this.uri.equals(n.uri)) + return true; + else + return false; } - public int compareTo(Node n){ + + public int compareTo(Node n) { return this.uri.toString().compareTo(n.uri.toString()); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/PropertyNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/PropertyNode.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/datastructure/PropertyNode.java 2008-01-19 01:26:40 UTC (rev 396) @@ -30,11 +30,19 @@ import org.dllearner.utilities.StringTuple; /** - * Property node. 
+ * Property node, has connection to a and b part * * @author Sebastian Hellmann * */ +/** + * @author sebastian + * + */ +/** + * @author sebastian + * + */ public class PropertyNode extends Node { // the a and b part of a property @@ -43,25 +51,27 @@ // specialtypes like owl:symmetricproperty private Set<String> specialTypes; - public PropertyNode(URI u) { - super(u); - this.type = "property"; - - } - public PropertyNode(URI u, Node a, Node b) { super(u); - this.type = "property"; + //this.type = "property"; this.a = a; this.b = b; this.specialTypes = new HashSet<String>(); } + + // Property Nodes are normally not expanded, + // this function is never called @Override public Vector<Node> expand(TypedSparqlQueryInterface tsq, Manipulator m) { + return null; + } + + // gets the types for properties recursively + @Override + public void expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { + b.expandProperties(tsq, m); Set<StringTuple> s = tsq.query(uri); - Vector<Node> Nodes = new Vector<Node>(); - // Manipulation Iterator<StringTuple> it = s.iterator(); while (it.hasNext()) { @@ -76,16 +86,10 @@ } } - return Nodes; + + } - // gets the types for properties recursively - @Override - public Vector<Node> expandProperties(TypedSparqlQueryInterface tsq, Manipulator m) { - b.expandProperties(tsq, m); - return this.expand(tsq, m); - } - public Node getA() { return a; } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/old/SparqlQueryConventional.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/old/SparqlQueryConventional.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/old/SparqlQueryConventional.java 2008-01-19 01:26:40 UTC (rev 396) @@ -7,9 +7,6 @@ import java.io.OutputStream; import java.io.OutputStreamWriter; import java.net.HttpURLConnection; -import java.net.URLEncoder; -import java.util.Iterator; -import java.util.Set; import org.dllearner.kb.sparql.configuration.SparqlEndpoint; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/old/oldSparqlOntologyCollector.java 2008-01-19 01:26:40 UTC (rev 396) @@ -130,6 +130,7 @@ return ret; } + /* public String[] collectTriples(String subject) throws IOException{ System.out.println("Searching for Article: "+subject); String sparql=SparqlQueryMaker.makeArticleQuery(subject); @@ -148,6 +149,7 @@ return processArticle(xml); } + */ public String[] processArticle(String xml) { @@ -180,7 +182,7 @@ return vec.toArray(ret); } - public String[] getSubjectsFromLabel(String label, int limit) throws IOException{ + /*public String[] getSubjectsFromLabel(String label, int limit) throws IOException{ System.out.println("Searching for Label: "+label); String sparql=SparqlQueryMaker.makeLabelQuery(label,limit); String FromCache=cache.get(label, sparql); @@ -197,9 +199,9 @@ } return processSubjects(xml); - } + }*/ - public String[] getSubjectsFromConcept(String concept) throws IOException + /*public String[] getSubjectsFromConcept(String concept) throws IOException { System.out.println("Searching for Subjects of type: "+concept); String sparql=SparqlQueryMaker.makeConceptQuery(concept); @@ -217,13 +219,14 @@ } return processSubjects(xml); - } + }*/ /** * calls getRecursive 
for each subject in list * @param subjects * @param NumberofRecursions */ + public void getRecursiveList(String[] subjects,int NumberofRecursions) throws IOException{ for (int i = 0; i < subjects.length; i++) { getRecursive(subjects[i], NumberofRecursions); @@ -236,6 +239,7 @@ * @param StartingSubject * @param NumberofRecursions */ + public void getRecursive(String StartingSubject,int NumberofRecursions) throws IOException{ System.out.print("SparqlModul: Depth: "+NumberofRecursions+" @ "+StartingSubject+" "); if(NumberofRecursions<=0) Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java 2008-01-19 01:26:40 UTC (rev 396) @@ -2,6 +2,13 @@ import org.dllearner.kb.sparql.configuration.SparqlEndpoint; +/** + * Does the same as SparqlQuery, but uses the cache. key should be an uri or + * something that can be mapped to a file see cache + * + * @author Jens Lehmann + * + */ public class CachedSparqlQuery { String key; @@ -11,19 +18,29 @@ SparqlQuery sparqlQuery; boolean debug_no_cache = false; - public CachedSparqlQuery(SparqlEndpoint e, Cache c, String key, + /** + * key should be an uri or something that can be mapped to a file see cache + * + * @param endpoint + * @param cache + * @param key + * @param queryString + */ + public CachedSparqlQuery(SparqlEndpoint endpoint, Cache cache, String key, String queryString) { - this.endpoint = e; - this.cache = c; + this.endpoint = endpoint; + this.cache = cache; this.key = key; this.queryString = queryString; - this.sparqlQuery = new SparqlQuery(queryString, e); + this.sparqlQuery = new SparqlQuery(queryString, endpoint); } - // URI u, String sparql - @Deprecated - public String send() { - + /** + * sends a query and returns XML using cache + * + * @return String xml + */ + public String getAsXMLString() { String FromCache = cache.get(key, queryString); if (debug_no_cache) { FromCache = null; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-19 01:26:40 UTC (rev 396) @@ -35,7 +35,7 @@ import com.hp.hpl.jena.sparql.core.ResultBinding; /** - * Represents a SPARQL query. It includes support for stopping the SPARQL query + * Represents one SPARQL query. It includes support for stopping the SPARQL query * (which may be necessary if a timeout is reached). 
* * @author Jens Lehmann @@ -49,17 +49,33 @@ private QueryExecution queryExecution; SparqlEndpoint endpoint; - public SparqlQuery(String queryString, URL u) { + /** + * simplest contructor, works only with some endpoints, + * not with DBpedia + * @param queryString + * @param url + */ + public SparqlQuery(String queryString, URL url) { this.queryString = queryString; - this.endpoint = new SparqlEndpoint(u); + this.endpoint = new SparqlEndpoint(url); } - public SparqlQuery(String queryString, SparqlEndpoint se) { + /** + * standard constructor + * @param queryString + * @param endpoint + */ + public SparqlQuery(String queryString, SparqlEndpoint endpoint) { this.queryString = queryString; - this.endpoint = se; + this.endpoint = endpoint; } - public ResultSet send() { + + /** + * method used for sending over Jena + * @return jena ResultSet + */ + protected ResultSet send() { isRunning = true; p(queryString); @@ -88,16 +104,35 @@ return isRunning; } + /** + * sends a query and returns XML + * + * @return String xml + */ public String getAsXMLString() { ResultSet rs = send(); return ResultSetFormatter.asXMLString(rs); } + /** + * sends a query and returns complicated Jena List with ResultBindings + * + * + * @return jena List<ResultBinding> + */ public List<ResultBinding> getAsList() { ResultSet rs = send(); return ResultSetFormatter.toList(rs); } + + /** + * sends a query and returns the results for variable + * TODO untested and not used, feel free to change + * varName as Vector<String> + * @param varName + * @return Vector<String> + */ public Vector<String> getAsVector(String varName) { ResultSet rs = send(); Vector<String> vret = new Vector<String>(); @@ -108,6 +143,15 @@ return vret; } + /** + * sends a query and returns the results for two variables + * ex: getAsVectorOfTupels("predicate", "object") + * TODO untested and not used, feel free to change + * + * @param varName1 + * @param varName2 + * @return Vector<StringTuple> + */ public Vector<StringTuple> getAsVectorOfTupels(String varName1, String varName2) { ResultSet rs = send(); @@ -120,7 +164,13 @@ return vret; } - @Deprecated + + /** + * sends a query and returns the results for n variables + * TODO not working, finish + * @param varNames + * @return Vector<Vector<String>> + */ public Vector<Vector<String>> getAsVectorOfVectors(Vector<String> varNames) { // ResultSet rs = send(); Vector<Vector<String>> vret = new Vector<Vector<String>>(); @@ -138,6 +188,48 @@ * public Model asJenaModel(){ ResultSet rs=send(); return * ResultSetFormatter.toModel(rs); } */ + + /** + * creates a query for subjects with the specified label + * @param label a phrase that is part of the label of a subject + * @param limit this limits the amount of results + * @param endpoint a SparqlEndpoint + * @return SparqlQuery + */ + public static SparqlQuery makeLabelQuery(String label,int limit,SparqlEndpoint endpoint){ + //TODO maybe use http://xmlns:com/foaf/0.1/page + String queryString= + "SELECT DISTINCT ?subject\n"+ + "WHERE { ?subject <http://www.w3.org/2000/01/rdf-schema#label> ?object.?object bif:contains '\""+label+"\"'@en}\n"+ + "LIMIT "+limit; + return new SparqlQuery( queryString,endpoint); + } + + /** + * creates a query for all subjects that are of the type concept + * @param concept the type that subjects are searched for + * @param endpoint a SparqlEndpoint + * @return SparqlQuery + */ + public static SparqlQuery makeConceptQuery(String concept, SparqlEndpoint endpoint){ + String queryString = + "SELECT DISTINCT ?subject\n"+ + "WHERE { ?subject 
a <"+concept+">}\n"; + return new SparqlQuery( queryString,endpoint); + } + + /** + * @param subject + * @param endpoint a SparqlEndpoint + * @return SparqlQuery + */ + public static SparqlQuery makeArticleQuery(String subject,SparqlEndpoint endpoint){ + String queryString = + "SELECT ?predicate,?object\n"+ + "WHERE { <"+subject+"> ?predicate ?object}\n"; + return new SparqlQuery( queryString,endpoint); + } + public void p(String str) { if (print_flag) { Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java 2008-01-19 01:26:40 UTC (rev 396) @@ -26,7 +26,7 @@ import org.dllearner.kb.sparql.Manager; /** - * Test class. + * Test class, uses the whole thing * * @author Sebastian Hellmann * Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java 2008-01-19 01:26:40 UTC (rev 396) @@ -27,20 +27,12 @@ import java.util.List; import org.dllearner.kb.sparql.configuration.SparqlEndpoint; -import org.dllearner.kb.sparql.old.SparqlQueryConventional; import org.dllearner.kb.sparql.query.SparqlQuery; import com.hp.hpl.jena.sparql.core.ResultBinding; public class TestResultSet { - // this is a working Jena script - // TODO: query runtime seems to be much too high (compared to running it in - // http://dbpedia.org/sparql) - // verify whether our SPARQL query implementation is faster and why; - // TODO: check whether Jena works with the other endpoints in - // PredefinedEndpoint; if not - // check whether it can be configured to run with these public static void main(String[] args) { String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " @@ -69,7 +61,7 @@ return ""; } - public static void testSaving(List l) { + public static void testSaving(List<ResultBinding> l) { System.out.println(l + "\n****************************"); try { // FileWriter fw=new FileWriter(new File(Filename),true); @@ -96,8 +88,8 @@ SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); SparqlQuery sqJena = new SparqlQuery(queryString, sse); - SparqlQueryConventional sqConv = new SparqlQueryConventional(sse); - + //SparqlQueryConventional sqConv = new SparqlQueryConventional(sse); + // first query is not counted long now = System.currentTimeMillis(); long tmp = now; @@ -142,10 +134,10 @@ public static void compareResults(String queryString) { SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); SparqlQuery sqJena = new SparqlQuery(queryString, sse); - SparqlQueryConventional sqConv = new SparqlQueryConventional(sse); + // SparqlQueryConventional sqConv = new SparqlQueryConventional(sse); System.out.println(sqJena.getAsXMLString()); - System.out.println(sqConv.getAsXMLString(queryString)); + //System.out.println(sqConv.getAsXMLString(queryString)); } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java 2008-01-18 23:21:56 UTC (rev 395) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java 2008-01-19 01:26:40 UTC (rev 396) 
@@ -20,30 +20,11 @@ package org.dllearner.kb.sparql.test; import org.dllearner.kb.sparql.configuration.SparqlEndpoint; -import org.dllearner.kb.sparql.old.SparqlQueryConventional; import org.dllearner.kb.sparql.query.SparqlQuery; -/** - * Represents a SPARQL query. It includes support for stopping the SPARQL query - * (which may be necessary if a timeout is reached). - * - * TODO: It is probably good to change all SPARQL query calls to use only this - * class. - * - * TODO: Could we use Jena as a solid foundation here? (com.hp.jena.query) - * - * @author Jens Lehmann - * - */ + public class TestSparqlQuery { - // this is a working Jena script - // TODO: query runtime seems to be much too high (compared to running it in - // http://dbpedia.org/sparql) - // verify whether our SPARQL query implementation is faster and why; - // TODO: check whether Jena works with the other endpoints in - // PredefinedEndpoint; if not - // check whether it can be configured to run with these public static void main(String[] args) { String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " @@ -61,7 +42,7 @@ public static void testTime(int howOften, String queryString) { SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); SparqlQuery sqJena = new SparqlQuery(queryString, sse); - SparqlQueryConventional sqConv = new SparqlQueryConventional(sse); + //SparqlQueryConventional sqConv = new SparqlQueryConventional(sse); // first query is not counted sqJena.getAsList(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
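For orientation, here is a minimal usage sketch of the query API as it stands after r396. It only uses constructors and methods visible in the diff above (makeLabelQuery, getAsXMLString, getAsVector, SparqlEndpoint.dbpediaEndpoint); the class name, the example label and the output handling are purely illustrative, and getAsVector is flagged as untested in the javadoc above, so treat this as a sketch rather than a reference implementation.

    import java.util.Vector;

    import org.dllearner.kb.sparql.configuration.SparqlEndpoint;
    import org.dllearner.kb.sparql.query.SparqlQuery;

    public class SparqlQueryUsageSketch {
        public static void main(String[] args) {
            // endpoint helper as used in the test classes above
            SparqlEndpoint endpoint = SparqlEndpoint.dbpediaEndpoint();

            // label search: subjects whose rdfs:label contains the phrase, at most 10 results
            SparqlQuery labelQuery = SparqlQuery.makeLabelQuery("Leipzig", 10, endpoint);
            System.out.println(labelQuery.getAsXMLString());

            // the same query again, this time reading only the ?subject bindings
            Vector<String> subjects = SparqlQuery.makeLabelQuery("Leipzig", 10, endpoint)
                    .getAsVector("subject");
            for (String subject : subjects) {
                System.out.println(subject);
            }
        }
    }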
From: <ku...@us...> - 2008-01-19 02:08:22
Revision: 397 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=397&view=rev Author: kurzum Date: 2008-01-18 18:08:16 -0800 (Fri, 18 Jan 2008) Log Message: ----------- I made some test classes, problem with TestStaticQueries everything in .sparql.old can be removed the extraction algorithm might not be working, because of problems with the cache or so check also how to return literals and such Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestExtraction.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestStaticQueries.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-19 01:26:40 UTC (rev 396) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-19 02:08:16 UTC (rev 397) @@ -155,9 +155,13 @@ public Vector<StringTuple> getAsVectorOfTupels(String varName1, String varName2) { ResultSet rs = send(); + Vector<StringTuple> vret = new Vector<StringTuple>(); List<ResultBinding> l = ResultSetFormatter.toList(rs); + //System.out.println(l); + //System.out.println(ResultSetFormatter.asXMLString(rs)); for (ResultBinding resultBinding : l) { + vret.add(new StringTuple(resultBinding.get(varName1).toString(), resultBinding.get(varName2).toString())); } @@ -200,7 +204,7 @@ //TODO maybe use http://xmlns:com/foaf/0.1/page String queryString= "SELECT DISTINCT ?subject\n"+ - "WHERE { ?subject <http://www.w3.org/2000/01/rdf-schema#label> ?object.?object bif:contains '\""+label+"\"'@en}\n"+ + "WHERE { ?subject <http://www.w3.org/2000/01/rdf-schema#label> ?object. ?object bif:contains '\""+label+"\"'@en}\n"+ "LIMIT "+limit; return new SparqlQuery( queryString,endpoint); } @@ -225,7 +229,7 @@ */ public static SparqlQuery makeArticleQuery(String subject,SparqlEndpoint endpoint){ String queryString = - "SELECT ?predicate,?object\n"+ + "SELECT ?predicate ?object\n"+ "WHERE { <"+subject+"> ?predicate ?object}\n"; return new SparqlQuery( queryString,endpoint); } Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java 2008-01-19 01:26:40 UTC (rev 396) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/Test.java 2008-01-19 02:08:16 UTC (rev 397) @@ -1,58 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- * - */ -package org.dllearner.kb.sparql.test; - -import java.io.File; -import java.io.FileWriter; -import java.net.URI; - -import org.dllearner.kb.sparql.Manager; - -/** - * Test class, uses the whole thing - * - * @author Sebastian Hellmann - * - */ -public class Test { - - public static void main(String[] args) { - System.out.println("Start"); - // String test2 = "http://www.extraction.org/config#dbpediatest"; - // String test = "http://www.extraction.org/config#localjoseki"; - try { - // URI u = new URI(test); - Manager m = new Manager(); - // m.usePredefinedConfiguration(u); - - URI u2 = new URI("http://dbpedia.org/resource/Angela_Merkel"); - - String filename = System.currentTimeMillis() + ".nt"; - FileWriter fw = new FileWriter(new File(filename), true); - fw.write(m.extract(u2)); - fw.flush(); - fw.close(); - - } catch (Exception e) { - e.printStackTrace(); - } - } - -} Added: trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestExtraction.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestExtraction.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestExtraction.java 2008-01-19 02:08:16 UTC (rev 397) @@ -0,0 +1,69 @@ +/** + * Copyright (C) 2007, Sebastian Hellmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.kb.sparql.test; + +import java.io.File; +import java.io.FileWriter; +import java.net.URI; +import java.util.LinkedList; + +import org.dllearner.kb.sparql.Manager; +import org.dllearner.kb.sparql.Manipulator; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlQueryType; +import org.dllearner.utilities.StringTuple; + +/** + * Test class, uses the whole thing + * + * @author Sebastian Hellmann + * + */ +public class TestExtraction { + + public static void main(String[] args) { + System.out.println("Start"); + // String test2 = "http://www.extraction.org/config#dbpediatest"; + // String test = "http://www.extraction.org/config#localjoseki"; + try { + // URI u = new URI(test); + Manager m = new Manager(); + // m.usePredefinedConfiguration(u); + + URI u2 = new URI("http://dbpedia.org/resource/Angela_Merkel"); + m.useConfiguration( + SparqlQueryType.getFilterByNumber(1),SparqlEndpoint.getEndpointByNumber(1), + new Manipulator("",200,new LinkedList<StringTuple>(),new LinkedList<StringTuple>()), + 1,true,true); + //, , + //manipulator, recursiondepth, getAllSuperClasses, closeAfterRecursion) + + String filename = System.currentTimeMillis() + ".nt"; + FileWriter fw = new FileWriter(new File(filename), true); + fw.write(m.extract(u2)); + fw.flush(); + fw.close(); + + } catch (Exception e) { + e.printStackTrace(); + } + } + +} Added: trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestStaticQueries.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestStaticQueries.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestStaticQueries.java 2008-01-19 02:08:16 UTC (rev 397) @@ -0,0 +1,72 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.kb.sparql.test; + +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; +import org.dllearner.kb.sparql.query.SparqlQuery; + +public class TestStaticQueries { + + // tests makeArticleQuery + // + // + public static void main(String[] args) { + String test1="http://dbpedia.org/resource/Angela_Merkel"; + String test2="http://dbpedia.org/resource/Leipzig"; + String test3="http://dbpedia.org/class/yago/Woman110787470"; + boolean one=false; + boolean two=true; + boolean three=false; + try { + if(one){ + //System.out.println(SparqlQuery.makeArticleQuery(test1, + // SparqlEndpoint.getEndpointByNumber(1)).getAsXMLString()); + //System.out.println(SparqlQuery.makeArticleQuery(test1, + // SparqlEndpoint.getEndpointByNumber(1)).getAsList()); + System.out.println(SparqlQuery.makeArticleQuery(test1, + SparqlEndpoint.getEndpointByNumber(1)).getAsVectorOfTupels("predicate", "object")); + } + + if(two){ + System.out.println(SparqlQuery.makeLabelQuery(test2,10, + SparqlEndpoint.getEndpointByNumber(1)).getAsXMLString()); + System.out.println(SparqlQuery.makeLabelQuery(test2,10, + SparqlEndpoint.getEndpointByNumber(1)).getAsList()); + System.out.println(SparqlQuery.makeLabelQuery(test2,10, + SparqlEndpoint.getEndpointByNumber(1)).getAsVector("subject")); + } + if(three){ + System.out.println(SparqlQuery.makeConceptQuery(test3, + SparqlEndpoint.getEndpointByNumber(1)).getAsXMLString()); + System.out.println(SparqlQuery.makeConceptQuery(test3, + SparqlEndpoint.getEndpointByNumber(1)).getAsList()); + System.out.println(SparqlQuery.makeConceptQuery(test3, + SparqlEndpoint.getEndpointByNumber(1)).getAsVector("subject")); + } + + + + } catch (Exception e) { + e.printStackTrace(); + } + + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
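Two of the fixes above are easy to miss but matter for the generated SPARQL: variables in a SELECT clause are separated by whitespace rather than commas, and a space is added between the two triple patterns of the label query. With the corrected makeArticleQuery, the query generated for the first test subject of TestStaticQueries would read roughly:

    SELECT ?predicate ?object
    WHERE { <http://dbpedia.org/resource/Angela_Merkel> ?predicate ?object}

which is what getAsVectorOfTupels("predicate", "object") in the test class above expects.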
From: <jen...@us...> - 2008-01-19 08:11:30
Revision: 398 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=398&view=rev Author: jenslehmann Date: 2008-01-19 00:11:27 -0800 (Sat, 19 Jan 2008) Log Message: ----------- - fixed/suppressed remaining warnings in SPARQL component - deleted deprecated files in SPARQL component Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/old/ Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-19 02:08:16 UTC (rev 397) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-19 08:11:27 UTC (rev 398) @@ -20,7 +20,6 @@ package org.dllearner.kb.sparql; import org.dllearner.kb.sparql.configuration.SparqlQueryType; -import org.dllearner.kb.sparql.old.oldSparqlFilter; /** * Can assemble sparql queries. can make queries for subject, predicate, object @@ -137,6 +136,7 @@ * special object encapsulating all options * @return sparql query */ + /* public static String makeQueryFilter(String subject, oldSparqlFilter sf) { String Filter = ""; @@ -157,7 +157,8 @@ // System.out.println(ret); return ret; } - +*/ + /* * moved to SparqlQuery TODO remove here creates a query for subjects with * the specified label @param label a phrase that is part of the label of a Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-19 02:08:16 UTC (rev 397) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-19 08:11:27 UTC (rev 398) @@ -120,6 +120,7 @@ * * @return jena List<ResultBinding> */ + @SuppressWarnings({"unchecked"}) public List<ResultBinding> getAsList() { ResultSet rs = send(); return ResultSetFormatter.toList(rs); @@ -133,6 +134,7 @@ * @param varName * @return Vector<String> */ + @SuppressWarnings({"unchecked"}) public Vector<String> getAsVector(String varName) { ResultSet rs = send(); Vector<String> vret = new Vector<String>(); @@ -152,6 +154,7 @@ * @param varName2 * @return Vector<StringTuple> */ + @SuppressWarnings({"unchecked"}) public Vector<StringTuple> getAsVectorOfTupels(String varName1, String varName2) { ResultSet rs = send(); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java 2008-01-19 02:08:16 UTC (rev 397) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java 2008-01-19 08:11:27 UTC (rev 398) @@ -61,6 +61,7 @@ return ""; } + @SuppressWarnings({"unchecked"}) public static void testSaving(List<ResultBinding> l) { System.out.println(l + "\n****************************"); try { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
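The @SuppressWarnings additions are needed presumably because ResultSetFormatter.toList(...) is not generic in the Jena version used here, so assigning its result to a List<ResultBinding> is an unchecked conversion. A commented version of the pattern, taken from getAsList in the diff above (send() and the imports belong to SparqlQuery):

    @SuppressWarnings({"unchecked"})
    public List<ResultBinding> getAsList() {
        ResultSet rs = send();
        // toList(...) returns a raw List here; the annotation silences the
        // unchecked-conversion warning without changing behaviour
        return ResultSetFormatter.toList(rs);
    }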
From: <ku...@us...> - 2008-01-23 08:25:30
Revision: 413 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=413&view=rev Author: kurzum Date: 2008-01-23 00:25:28 -0800 (Wed, 23 Jan 2008) Log Message: ----------- reduced warnings Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-23 08:17:01 UTC (rev 412) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2008-01-23 08:25:28 UTC (rev 413) @@ -127,11 +127,11 @@ return "&&( !regex(str(?object), '" + ns + "') )"; } - private void p(String str) { + /*private void p(String str) { if (print_flag) { System.out.println(str); } - } + }*/ /** * creates a query with the specified filters for all triples with subject Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-23 08:17:01 UTC (rev 412) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-23 08:25:28 UTC (rev 413) @@ -95,7 +95,7 @@ } - @Deprecated + /*@Deprecated private Set<StringTuple> cachedSparql(URI uri, String sparqlQueryString, String a, String b) { return null; @@ -118,7 +118,7 @@ // System.out.println(sparql); // System.out.println(xml); // process XML - } + //} /** * TODO old XML processing, can be removed, once Jena is done This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
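Commenting out the unused p(String) helper is one way to get rid of the warning; if the debug helper should stay compilable for later use, a scoped suppression works as well. Note that the "unused" token is an assumption here: it is recognised by Eclipse's warning checks, while javac simply ignores tokens it does not know.

    @SuppressWarnings("unused")
    private void p(String str) {
        if (print_flag) {
            System.out.println(str);
        }
    }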
From: <ku...@us...> - 2008-01-23 09:02:03
Revision: 415 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=415&view=rev Author: kurzum Date: 2008-01-23 01:02:01 -0800 (Wed, 23 Jan 2008) Log Message: ----------- added compatability with JSON Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-23 09:01:29 UTC (rev 414) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-23 09:02:01 UTC (rev 415) @@ -19,6 +19,8 @@ */ package org.dllearner.kb.sparql.query; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.net.URL; import java.util.Iterator; import java.util.List; @@ -32,6 +34,7 @@ import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFactory; import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.sparql.core.ResultBinding; @@ -226,11 +229,34 @@ * public Model asJenaModel(){ ResultSet rs=send(); return * ResultSetFormatter.toModel(rs); } */ + public String getAsJSON(){ + ResultSet rs=send(); + ByteArrayOutputStream baos=new ByteArrayOutputStream(); + ResultSetFormatter.outputAsJSON(baos, rs); + return baos.toString(); + + } + public static ResultSet JSONtoResultSet(String json){ + ResultSet rs=null; + try{ + ByteArrayInputStream bais=new ByteArrayInputStream(json.getBytes()); + rs=ResultSetFactory.fromJSON(bais); + + }catch (Exception e) {e.printStackTrace();} + return rs; + + } + + /*public void testJSon(){ - - ResultSet rs=send(); try{ + + try{ + ByteArrayInputStream BAIS=new ByteArrayInputStream(JSON.getBytes()); + ResultSet rs2=ResultSetFactory.fromJSON(BAIS); + System.out.println(ResultSetFormatter.asXMLString(rs2)); + }catch (Exception e) {e.printStackTrace();} //PipedOutputStream pos=new PipedOutputStream(); //pos.flush(); //System.out.println("hh"); @@ -248,7 +274,7 @@ }catch (Exception e) {e.printStackTrace();} //return //ResultSetFormatter.toModel(rs); } - }*/ + } /** * creates a query for subjects with the specified label Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java 2008-01-23 09:01:29 UTC (rev 414) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestResultSet.java 2008-01-23 09:02:01 UTC (rev 415) @@ -43,10 +43,12 @@ SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); SparqlQuery sqJena = new SparqlQuery(queryString, sse); - //sqJena.testJSon(); + String json=sqJena.getAsJSON(); + System.out.println(json); - List<ResultBinding> l = sqJena.getAsList(); - System.out.println(l.getClass()); + + //List<ResultBinding> l = sqJena.getAsList(); + //System.out.println(l.getClass()); //testSaving(new LinkedList<ResultBinding>(l)); /* * for (ResultBinding o : l) { System.out.println(o); // Iterator This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
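A short round-trip sketch for the two methods added in this revision. The endpoint helper, the queryString variable and the final XML dump are assumptions taken from the earlier test classes; getAsJSON() and JSONtoResultSet(...) are the methods introduced above.

    SparqlEndpoint endpoint = SparqlEndpoint.dbpediaEndpoint();
    SparqlQuery query = new SparqlQuery(queryString, endpoint);

    // serialise the result set to a JSON string, e.g. to store it in the cache
    String json = query.getAsJSON();

    // later: rebuild a Jena ResultSet from the stored JSON without re-querying
    ResultSet rs = SparqlQuery.JSONtoResultSet(json);
    System.out.println(ResultSetFormatter.asXMLString(rs));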
From: <ku...@us...> - 2008-01-23 11:07:23
Revision: 416 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=416&view=rev Author: kurzum Date: 2008-01-23 03:07:20 -0800 (Wed, 23 Jan 2008) Log Message: ----------- Finished JSON, added SpeedTest Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/test/JenaQueryToResultSpeedTest.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-23 09:02:01 UTC (rev 415) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-23 11:07:20 UTC (rev 416) @@ -22,6 +22,7 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.net.URL; +import java.nio.charset.Charset; import java.util.Iterator; import java.util.List; import java.util.Vector; @@ -229,53 +230,35 @@ * public Model asJenaModel(){ ResultSet rs=send(); return * ResultSetFormatter.toModel(rs); } */ + /** + * sends a query and returns JSON + * @return a String representation of the Resultset as JSON + */ public String getAsJSON(){ ResultSet rs=send(); ByteArrayOutputStream baos=new ByteArrayOutputStream(); ResultSetFormatter.outputAsJSON(baos, rs); return baos.toString(); - } + + /** + * @param json a string representation string object + * @return jena ResultSet + */ public static ResultSet JSONtoResultSet(String json){ ResultSet rs=null; try{ - ByteArrayInputStream bais=new ByteArrayInputStream(json.getBytes()); + ByteArrayInputStream bais=new ByteArrayInputStream(json.getBytes(Charset.forName("UTF-8"))); rs=ResultSetFactory.fromJSON(bais); - }catch (Exception e) {e.printStackTrace();} return rs; } - /*public void testJSon(){ - try{ - - try{ - ByteArrayInputStream BAIS=new ByteArrayInputStream(JSON.getBytes()); - ResultSet rs2=ResultSetFactory.fromJSON(BAIS); - System.out.println(ResultSetFormatter.asXMLString(rs2)); - }catch (Exception e) {e.printStackTrace();} - //PipedOutputStream pos=new PipedOutputStream(); - //pos.flush(); - //System.out.println("hh"); - //PipedInputStream pis=new PipedInputStream(pos); - //System.out.println("hh2"); - //pis.flush(); - //PrintStream out=new PrintStream(); - //ResultSetFormatter.outputAsJSON(System.out, rs); - //pos.flush(); - //System.out.println("hh"); - //while (pis.available()>0) - //System.out.println("hh"); - //System.out.println(pis.read());; - - }catch (Exception e) {e.printStackTrace();} - //return - //ResultSetFormatter.toModel(rs); } - } + /** * creates a query for subjects with the specified label * @param label a phrase that is part of the label of a subject Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/test/JenaQueryToResultSpeedTest.java (from rev 411, trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/JenaQueryToResultSpeedTest.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/JenaQueryToResultSpeedTest.java 2008-01-23 11:07:20 UTC (rev 416) @@ -0,0 +1,149 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. 
+ * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.kb.sparql.test; + +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; +import org.dllearner.kb.sparql.query.SparqlQuery; + + +public class JenaQueryToResultSpeedTest { + static boolean print_flag=false; + + public static void main(String[] args) { + + String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " + + "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>" + + "SELECT ?episode ?chalkboard_gag WHERE { ?episode skos:subject" + + " <http://dbpedia.org/resource/Category:The_Simpsons_episodes%2C_season_12>." + + " ?episode dbpedia2:blackboard ?chalkboard_gag }"; + + int howOften=20; + testJenaAsXML(howOften, queryString); + testJenaAsList(howOften, queryString); + testJenaAsJSON(howOften, queryString); + testJenaAsJSONandBack(howOften, queryString); + + + + // compareResults( queryString); + + } + + + public static void testJenaAsXML(int howOften, String queryString){ + SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); + SparqlQuery sqJena = new SparqlQuery(queryString, sse); + // first query is not counted + sqJena.getAsXMLString(); + long now = System.currentTimeMillis(); + long tmp = now; + for (int i = 0; i < howOften; i++) { + + sqJena.getAsXMLString(); + p("Jena as XML needed: " + + (System.currentTimeMillis() - tmp)); + tmp = System.currentTimeMillis(); + } + long total=System.currentTimeMillis() - now; + System.out.println("Jena as XML total: " + total + + " ms , average: "+ (total/howOften) ); + + } + + public static void testJenaAsList(int howOften, String queryString){ + SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); + SparqlQuery sqJena = new SparqlQuery(queryString, sse); + // first query is not counted + sqJena.getAsList(); + long now = System.currentTimeMillis(); + long tmp = now; + for (int i = 0; i < howOften; i++) { + + sqJena.getAsList(); + p("Jena as List needed: " + + (System.currentTimeMillis() - tmp)); + tmp = System.currentTimeMillis(); + + } + long total=System.currentTimeMillis() - now; + System.out.println("Jena as List total: " + total + + " ms , average: "+ (total/howOften) ); + + } + + public static void testJenaAsJSON(int howOften, String queryString){ + SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); + SparqlQuery sqJena = new SparqlQuery(queryString, sse); + // first query is not counted + sqJena.getAsJSON(); + long now = System.currentTimeMillis(); + long tmp = now; + for (int i = 0; i < howOften; i++) { + + sqJena.getAsJSON(); + p("Jena as JSON needed: " + + (System.currentTimeMillis() - tmp)); + tmp = System.currentTimeMillis(); + + } + long total=System.currentTimeMillis() - now; + System.out.println("Jena as JSON total: " + total + + " ms , average: "+ (total/howOften) ); + + } + + public static void testJenaAsJSONandBack(int howOften, String queryString){ + SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); + SparqlQuery 
sqJena = new SparqlQuery(queryString, sse); + // first query is not counted + sqJena.getAsJSON(); + long now = System.currentTimeMillis(); + long tmp = now; + for (int i = 0; i < howOften; i++) { + + // System.out.println(sqJena.getAsJSON()); + SparqlQuery.JSONtoResultSet(sqJena.getAsJSON()); + p("Jena as JSON and back needed: " + + (System.currentTimeMillis() - tmp)); + tmp = System.currentTimeMillis(); + + } + long total=System.currentTimeMillis() - now; + System.out.println("Jena as JSON and back total: " + total + + " ms , average: "+ (total/howOften) ); + + } + + + public static void compareResults(String queryString) { + SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); + SparqlQuery sqJena = new SparqlQuery(queryString, sse); + // SparqlQueryConventional sqConv=new SparqlQueryConventional(sse); + + System.out.println(sqJena.getAsXMLString()); + // System.out.println(sqConv.getAsXMLString("")); + + } + + static void p(String s) { + if (print_flag) + System.out.println(s); + } +} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java 2008-01-23 09:02:01 UTC (rev 415) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/test/TestSparqlQuery.java 2008-01-23 11:07:20 UTC (rev 416) @@ -1,84 +0,0 @@ -/** - * Copyright (C) 2007-2008, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql.test; - -import org.dllearner.kb.sparql.configuration.SparqlEndpoint; -import org.dllearner.kb.sparql.query.SparqlQuery; - - -public class TestSparqlQuery { - - public static void main(String[] args) { - - String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " - + "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>" - + "SELECT ?episode ?chalkboard_gag WHERE { ?episode skos:subject" - + " <http://dbpedia.org/resource/Category:The_Simpsons_episodes%2C_season_12>." 
- + " ?episode dbpedia2:blackboard ?chalkboard_gag }"; - - testTime(20, queryString); - - // compareResults( queryString); - - } - - public static void testTime(int howOften, String queryString) { - SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); - SparqlQuery sqJena = new SparqlQuery(queryString, sse); - //SparqlQueryConventional sqConv = new SparqlQueryConventional(sse); - - // first query is not counted - sqJena.getAsList(); - long now = System.currentTimeMillis(); - long tmp = now; - for (int i = 0; i < howOften; i++) { - // sqConv.getAsXMLString(queryString); - sqJena.getAsList(); - System.out.println("Conv needed: " - + (System.currentTimeMillis() - tmp)); - tmp = System.currentTimeMillis(); - - } - System.out.println("Conv total: " + (System.currentTimeMillis() - now)); - // first query is not counted - sqJena.getAsXMLString(); - now = System.currentTimeMillis(); - tmp = now; - for (int i = 0; i < howOften; i++) { - - sqJena.getAsXMLString(); - System.out.println("Jena needed: " - + (System.currentTimeMillis() - tmp)); - tmp = System.currentTimeMillis(); - - } - System.out.println("Jena total: " + (System.currentTimeMillis() - now)); - } - - public static void compareResults(String queryString) { - SparqlEndpoint sse = SparqlEndpoint.dbpediaEndpoint(); - SparqlQuery sqJena = new SparqlQuery(queryString, sse); - // SparqlQueryConventional sqConv=new SparqlQueryConventional(sse); - - System.out.println(sqJena.getAsXMLString()); - // System.out.println(sqConv.getAsXMLString("")); - - } -} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2008-01-23 12:41:09
Revision: 420 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=420&view=rev Author: kurzum Date: 2008-01-23 04:41:04 -0800 (Wed, 23 Jan 2008) Log Message: ----------- integrated cache component works Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2008-01-23 12:13:23 UTC (rev 419) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2008-01-23 12:41:04 UTC (rev 420) @@ -43,7 +43,7 @@ private boolean print_flag = false; public ExtractionAlgorithm(Configuration Configuration) { - // this.configuration = Configuration; + this.configuration = Configuration; this.manipulator = Configuration.getManipulator(); this.recursionDepth = Configuration.getRecursiondepth(); // this.getAllSuperClasses = Configuration.isGetAllSuperClasses(); @@ -71,8 +71,12 @@ * @return */ public Node expandNode(URI uri, TypedSparqlQuery typedSparqlQuery) { + //System.out.println(uri.toString()); + //System.out.println(manipulator); + //System.out.println(this.configuration); long time = System.currentTimeMillis(); Node n = getFirstNode(uri); + System.out.println(n); Vector<Node> v = new Vector<Node>(); v.add(n); p("StartVector: " + v); @@ -85,12 +89,11 @@ Node tmpNode = v.remove(0); p("Expanding " + tmpNode); // System.out.println(this.Manipulator); - // these are the new not expanded nodes // the others are saved in connection with the original node Vector<Node> tmpVec = tmpNode.expand(typedSparqlQuery, manipulator); - + //System.out.println(tmpVec); tmp.addAll(tmpVec); } v = tmp; @@ -101,6 +104,8 @@ } HashSet<String> hadAlready = new HashSet<String>(); + p("Get all superclasses"); + //p(configuration.toString()); // gets All Class Nodes and expands them further if (this.configuration.isGetAllSuperClasses()) { // Set<Node> classes = new TreeSet<Node>(); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-23 12:13:23 UTC (rev 419) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-23 12:41:04 UTC (rev 420) @@ -49,6 +49,7 @@ this.configuration = new Configuration(SparqlEndpoint, SparqlQueryType, manipulator, recursiondepth, getAllSuperClasses, closeAfterRecursion); + //System.out.println(this.configuration); this.typedSparqlQuery = new TypedSparqlQuery(configuration); this.extractionAlgorithm = new ExtractionAlgorithm(configuration); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-23 12:13:23 UTC (rev 419) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-23 12:41:04 UTC (rev 420) @@ -21,6 +21,7 @@ import java.net.URI; import java.util.HashSet; +import java.util.List; import java.util.Set; import 
org.dllearner.kb.sparql.configuration.Configuration; @@ -29,6 +30,10 @@ import org.dllearner.kb.sparql.query.SparqlQuery; import org.dllearner.utilities.StringTuple; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; +import com.hp.hpl.jena.sparql.core.ResultBinding; + /** * Can execute different queries. * @@ -71,9 +76,10 @@ * normally object * @return */ - + @SuppressWarnings({"unchecked"}) public Set<StringTuple> getTupelForResource(URI uri) { - // TODO remove + Set<StringTuple> s = new HashSet<StringTuple>(); + String a = "predicate"; String b = "object"; // getQuery @@ -83,16 +89,18 @@ CachedSparqlQuery csq = new CachedSparqlQuery(configuration .getSparqlEndpoint(), cache, uri.toString(), sparqlQueryString); - String xml = csq.getAsXMLString(); - // TODO needs to be changed to new format - Set<StringTuple> s = processResult(xml, a, b); - try { - // System.out.println("retrieved " + s.size() + " tupels\n"); - } catch (Exception e) { + + // TODO optimize + ResultSet rs = csq.getAsResultSet(); + + List<ResultBinding> l = ResultSetFormatter.toList(rs); + p(l.toString()); + for (ResultBinding resultBinding : l) { + + s.add(new StringTuple(resultBinding.get(a).toString(), + resultBinding.get(b).toString())); } return s; - // return cachedSparql(u, sparql, "predicate", "object"); - } /*@Deprecated Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-23 12:13:23 UTC (rev 419) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-23 12:41:04 UTC (rev 420) @@ -20,12 +20,18 @@ package org.dllearner.kb.sparql; import java.net.URI; +import java.util.HashSet; +import java.util.List; import java.util.Set; import org.dllearner.kb.sparql.configuration.Configuration; import org.dllearner.kb.sparql.query.CachedSparqlQuery; import org.dllearner.utilities.StringTuple; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; +import com.hp.hpl.jena.sparql.core.ResultBinding; + /** * Can execute different queries. 
* @@ -46,8 +52,9 @@ * @see org.dllearner.kb.sparql.TypedSparqlQuery#getTupelForResource(java.net.URI) */ @Override + @SuppressWarnings({"unchecked"}) public Set<StringTuple> getTupelForResource(URI uri) { - // TODO remove + Set<StringTuple> s = new HashSet<StringTuple>(); String a = "predicate"; String b = "object"; // getQuery for all super classes of classes only @@ -60,16 +67,15 @@ CachedSparqlQuery csq = new CachedSparqlQuery(configuration .getSparqlEndpoint(), cache, uri.toString(), sparqlQueryString); - String xml = csq.getAsXMLString(); - // TODO needs to be changed to new format - Set<StringTuple> s = processResult(xml, a, b); - try { - // System.out.println("retrieved " + s.size() + " tupels\n"); - } catch (Exception e) { + // TODO optimize + ResultSet rs = csq.getAsResultSet(); + List<ResultBinding> l = ResultSetFormatter.toList(rs); + for (ResultBinding resultBinding : l) { + + s.add(new StringTuple(resultBinding.get(a).toString(), + resultBinding.get(b).toString())); } return s; - // return cachedSparql(u, sparql, "predicate", "object"); - } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java 2008-01-23 12:13:23 UTC (rev 419) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlQueryType.java 2008-01-23 12:41:04 UTC (rev 420) @@ -78,6 +78,8 @@ return YAGOSKOS(); case 4: return YagoSpecialHierarchy(); + case 5: + return test(); } return null; } @@ -88,7 +90,6 @@ pred.add("http://www.w3.org/2004/02/skos/core"); pred.add("http://www.w3.org/2002/07/owl#sameAs"); pred.add("http://xmlns.com/foaf/0.1/"); - pred.add("http://dbpedia.org/property/reference"); pred.add("http://dbpedia.org/property/website"); pred.add("http://dbpedia.org/property/wikipage"); @@ -201,7 +202,33 @@ return new SparqlQueryType("forbid", obj, pred, false); } + public static SparqlQueryType test(){ + Set<String> pred = new HashSet<String>(); + pred.add("http://www.w3.org/2004/02/skos/core"); + pred.add("http://www.w3.org/2002/07/owl#sameAs"); + pred.add("http://xmlns.com/foaf/0.1/"); + //pred.add("http://dbpedia.org/property/reference"); + //pred.add("http://dbpedia.org/property/website"); + //pred.add("http://dbpedia.org/property/wikipage"); + pred.add("http://dbpedia.org/property/wikiPageUsesTemplate"); + pred.add("http://dbpedia.org/property/relatedInstance"); + + Set<String> obj = new HashSet<String>(); + //obj.add("http://dbpedia.org/resource/Category:Wikipedia_"); + //obj.add("http://dbpedia.org/resource/Category:Articles_"); + obj.add("http://dbpedia.org/resource/Category:"); + obj.add("http://dbpedia.org/resource/Template"); + obj.add("http://xmlns.com/foaf/0.1/"); + obj.add("http://upload.wikimedia.org/wikipedia/commons"); + obj.add("http://upload.wikimedia.org/wikipedia"); + obj.add("http://www.geonames.org"); + obj.add("http://www.w3.org/2006/03/wn/wn20/instances/synset"); + obj.add("http://www4.wiwiss.fu-berlin.de/flickrwrappr"); + obj.add("http://www.w3.org/2004/02/skos/core"); + return new SparqlQueryType("forbid", obj, pred, false); + } + } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-23 12:13:23 UTC (rev 419) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-23 12:41:04 UTC (rev 420) @@ 
-88,16 +88,21 @@ p(queryString); // create a query and parse it into Jena Query query = QueryFactory.create(queryString); - // query.validate(); + query.validate(); String service = endpoint.getURL().toString(); + p(endpoint.getURL().toString()); // Jena access to SPARQL endpoint QueryExecution queryExecution = QueryExecutionFactory.sparqlService( service, query, endpoint.getDefaultGraphURIs(), endpoint .getNamedGraphURIs()); p("query SPARQL server"); + + ResultSet rs = queryExecution.execSelect(); + p(rs.getResultVars().toString()); + //p(ResultSetFormatter.asXMLString(rs)); return rs; } @@ -170,6 +175,7 @@ * @return Vector<String> */ @SuppressWarnings({"unchecked"}) + @Deprecated public Vector<String> getAsVector(String varName) { ResultSet rs = send(); Vector<String> vret = new Vector<String>(); @@ -190,6 +196,7 @@ * @return Vector<StringTuple> */ @SuppressWarnings({"unchecked"}) + @Deprecated public Vector<StringTuple> getAsVectorOfTupels(String varName1, String varName2) { ResultSet rs = send(); @@ -213,6 +220,7 @@ * @param varNames * @return Vector<Vector<String>> */ + @Deprecated public Vector<Vector<String>> getAsVectorOfVectors(Vector<String> varNames) { // ResultSet rs = send(); Vector<Vector<String>> vret = new Vector<Vector<String>>(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
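With the cache integrated, TypedSparqlQuery and TypedSparqlQueryClasses now share the same core pattern: run the (cached) query, turn the Jena result set into a list of bindings, and map each row to a StringTuple. Condensed from the diff above, with csq standing for the CachedSparqlQuery built a few lines earlier:

    Set<StringTuple> tuples = new HashSet<StringTuple>();
    ResultSet rs = csq.getAsResultSet();
    List<ResultBinding> bindings = ResultSetFormatter.toList(rs);
    for (ResultBinding binding : bindings) {
        // "predicate" and "object" are the variable names used in the generated queries
        tuples.add(new StringTuple(binding.get("predicate").toString(),
                binding.get("object").toString()));
    }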
From: <sk...@us...> - 2008-01-28 14:26:55
Revision: 432 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=432&view=rev Author: sknappe Date: 2008-01-28 06:26:54 -0800 (Mon, 28 Jan 2008) Log Message: ----------- fixed some bugs Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-28 03:09:12 UTC (rev 431) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-28 14:26:54 UTC (rev 432) @@ -32,7 +32,6 @@ import java.util.Map; import java.util.Random; import java.util.Set; -import java.util.Vector; import org.apache.log4j.Logger; import org.dllearner.core.KnowledgeSource; @@ -98,39 +97,6 @@ SparqlEndpoint endpoint = null; - /** - * Holds the results of the calculateSubjects method - */ - private String[] subjects; - - /** - * Holds the results of the calculateTriples method - */ - private String[] triples; - - /** - * Holds the results of the calculateConceptSubjects method - */ - private String[] conceptSubjects; - - /** - * if a method is running this becomes true - */ - private boolean subjectThreadRunning = false; - - private boolean triplesThreadRunning = false; - - private boolean conceptThreadRunning = false; - - /** - * the Thread that is running a method - */ - private Thread subjectThread; - - private Thread triplesThread; - - private Thread conceptThread; - private LinkedList<String> defaultGraphURIs = new LinkedList<String>(); private LinkedList<String> namedGraphURIs = new LinkedList<String>(); @@ -426,136 +392,6 @@ return ontArray; } - /** - * - * @param label - * @param limit - */ - public void calculateSubjects(String label, int limit) { - logger.info("SparqlModul: Collecting Subjects"); - // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - // try { - Vector<String> v = (SparqlQuery.makeLabelQuery(label, limit, endpoint) - .getAsVector("subject")); - subjects = (String[]) v.toArray(new String[v.size()]); - // subjects = oc.getSubjectsFromLabel(label, limit); - // } catch (IOException e) { - // TODO I removed IOException, please check - // subjects = new String[1]; - // subjects[0] = "[Error]Sparql Endpoint could not be reached."; - // } - logger.info("SparqlModul: ****Finished"); - } - - /** - * TODO SparqlOntologyCollector needs to be removed - * - * @param subject - */ - public void calculateTriples(String subject) { - logger.info("SparqlModul: Collecting Triples"); - Vector<StringTuple> v = (SparqlQuery - .makeArticleQuery(subject, endpoint).getAsVectorOfTupels( - "predicate", "objcet")); - // String[] subjects = (String[]) v.toArray(new String[v.size()]); - String[] tmp = new String[v.size()]; - int i = 0; - for (StringTuple stringTuple : v) { - tmp[i++] = stringTuple.a + "<" + stringTuple.b; - } - triples = tmp; - // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - // try { - // triples = oc.collectTriples(subject); - // } catch (IOException e) { - // triples = new String[1]; - // triples[0] = "[Error]Sparql Endpoint could not be reached."; - // } - logger.info("SparqlModul: ****Finished"); - } - - /** - * - * - * @param concept - */ - public void calculateConceptSubjects(String concept) { - logger.info("SparqlModul: 
Collecting Subjects"); - Vector<String> v = (SparqlQuery.makeConceptQuery(concept, endpoint) - .getAsVector("subject")); - conceptSubjects = (String[]) v.toArray(new String[v.size()]); - - // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - // try { - // conceptSubjects = oc.getSubjectsFromConcept(concept); - // } catch (IOException e) { - // TODO I removed IOException, please check - // conceptSubjects = new String[1]; - // conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; - // } - logger.info("SparqlModul: ****Finished"); - } - - public boolean subjectThreadIsRunning() { - return subjectThreadRunning; - } - - public void setSubjectThreadRunning(boolean bool) { - subjectThreadRunning = bool; - } - - public boolean triplesThreadIsRunning() { - return triplesThreadRunning; - } - - public void setTriplesThreadRunning(boolean bool) { - triplesThreadRunning = bool; - } - - public boolean conceptThreadIsRunning() { - return conceptThreadRunning; - } - - public void setConceptThreadRunning(boolean bool) { - conceptThreadRunning = bool; - } - - public String[] getSubjects() { - return subjects; - } - - public Thread getSubjectThread() { - return subjectThread; - } - - public void setSubjectThread(Thread subjectThread) { - this.subjectThread = subjectThread; - } - - public Thread getTriplesThread() { - return triplesThread; - } - - public void setTriplesThread(Thread triplesThread) { - this.triplesThread = triplesThread; - } - - public Thread getConceptThread() { - return conceptThread; - } - - public void setConceptThread(Thread conceptThread) { - this.conceptThread = conceptThread; - } - - public String[] getTriples() { - return triples; - } - - public String[] getConceptSubjects() { - return conceptSubjects; - } - public int sparqlQuery(String query) { this.endpoint = new SparqlEndpoint(url, defaultGraphURIs, namedGraphURIs); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java 2008-01-28 03:09:12 UTC (rev 431) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java 2008-01-28 14:26:54 UTC (rev 432) @@ -73,10 +73,14 @@ * TODO can further be optimized * @return a jena ResultSet */ - public ResultSet getAsResultSet(){ + public ResultSet getAsResultSet2(){ return SparqlQuery.JSONtoResultSet(getAsJSON()); } + public ResultSet getAsResultSet(){ + return this.sparqlQuery.send(); + } + /** * sends a query and returns JSON using cache * Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-28 03:09:12 UTC (rev 431) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-28 14:26:54 UTC (rev 432) @@ -30,10 +30,7 @@ import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.utilities.StringTuple; -import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; -import com.hp.hpl.jena.query.QueryExecutionFactory; -import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.query.ResultSetFactory; import com.hp.hpl.jena.query.ResultSetFormatter; @@ -125,7 +122,6 @@ public String[][] getAsStringArray(){ System.out.println("Starting Query"); ResultSet rs=send(); - 
System.out.println("getResults"); List<ResultBinding> l = ResultSetFormatter.toList(rs); List<String> resultVars=rs.getResultVars(); String[][] array=new String[l.size()][resultVars.size()]; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <sk...@us...> - 2008-02-28 13:56:47
Revision: 660 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=660&view=rev Author: sknappe Date: 2008-02-28 05:56:42 -0800 (Thu, 28 Feb 2008) Log Message: ----------- fixed some bugs Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java 2008-02-27 19:03:22 UTC (rev 659) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java 2008-02-28 13:56:42 UTC (rev 660) @@ -33,6 +33,7 @@ import org.apache.log4j.Logger; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; /** * SPARQL query cache to avoid possibly expensive multiple queries. The queries @@ -217,13 +218,14 @@ if (result != null) { return SparqlQuery.JSONtoResultSet(result); } else { - ResultSet rs = query.send(); + query.send(); + ResultSet rs = query.getResultSet(); if (rs!=null){ String json = SparqlQuery.getAsJSON(rs); addToCache(query.getQueryString(), json); return SparqlQuery.JSONtoResultSet(json); } - else return rs; + return rs; } } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java 2008-02-27 19:03:22 UTC (rev 659) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java 2008-02-28 13:56:42 UTC (rev 660) @@ -175,7 +175,7 @@ ResultSetFormatter.outputAsJSON(baos, resultSet); // possible Jena bug: Jena modifies the result set during // JSON transformation, so we need to get it back - //resultSet = JSONtoResultSet(baos.toString()); + resultSet = JSONtoResultSet(baos.toString()); try{ return baos.toString("UTF-8"); }catch (Exception e){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
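The re-enabled line in getAsJSON works around the fact that a Jena ResultSet behaves like a forward-only iterator: writing it out with ResultSetFormatter.outputAsJSON consumes it, so the method restores a usable result set from the JSON it has just produced before returning. The pattern, condensed from the diff above (the UTF-8 conversion is wrapped in a try/catch in the actual code):

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ResultSetFormatter.outputAsJSON(baos, resultSet); // this exhausts the result set
    resultSet = JSONtoResultSet(baos.toString());     // rebuild it from the JSON snapshot
    String json = baos.toString("UTF-8");             // returned to the caller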