From: <jen...@us...> - 2007-12-02 11:52:08
Revision: 296
          http://dl-learner.svn.sourceforge.net/dl-learner/?rev=296&view=rev
Author:   jenslehmann
Date:     2007-12-02 03:52:02 -0800 (Sun, 02 Dec 2007)

Log Message:
-----------
refactored and formatted new SPARQL component files

Modified Paths:
--------------
    trunk/examples/dbpedia/sparql.conf
    trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java

Added Paths:
-----------
    trunk/src/dl-learner/org/dllearner/kb/sparql/
    trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/SimpleHTTPRequest.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryType.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Test.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/Tupel.java
    trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java

Removed Paths:
-------------
    trunk/src/dl-learner/org/dllearner/kb/extraction/Configuration.java
    trunk/src/dl-learner/org/dllearner/kb/extraction/ExtractionAlgorithm.java
    trunk/src/dl-learner/org/dllearner/kb/extraction/Manager.java
    trunk/src/dl-learner/org/dllearner/kb/extraction/Manipulator.java
    trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlEndpoint.java
    trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlQueryType.java
    trunk/src/dl-learner/org/dllearner/kb/extraction/Test.java
    trunk/src/dl-learner/org/dllearner/kb/extraction/datastructures/
    trunk/src/dl-learner/org/dllearner/kb/extraction/sparql/

Modified: trunk/examples/dbpedia/sparql.conf
===================================================================
--- trunk/examples/dbpedia/sparql.conf	2007-12-02 11:42:36 UTC (rev 295)
+++ trunk/examples/dbpedia/sparql.conf	2007-12-02 11:52:02 UTC (rev 296)
@@ -31,16 +31,16 @@
 cli.showSubsumptionHierarchy = false;
 
 // SPARQL options
-sparql.numberOfRecursions = 2;
-sparql.instances = {"http://dbpedia.org/resource/Democritus","http://dbpedia.org/resource/Zeno_of_Elea","http://dbpedia.org/resource/Archytas","http://dbpedia.org/resource/Plato","http://dbpedia.org/resource/Philolaus","http://dbpedia.org/resource/Pythagoras","http://dbpedia.org/resource/Socrates"};
-sparql.filterMode = 0;
-sparql.predList = {};
-sparql.objList = {};
-sparql.classList = {};
-sparql.format = "KB";
-sparql.dumpToFile = false;
+sparql2.numberOfRecursions = 2;
+sparql2.instances = {"http://dbpedia.org/resource/Democritus","http://dbpedia.org/resource/Zeno_of_Elea","http://dbpedia.org/resource/Archytas","http://dbpedia.org/resource/Plato","http://dbpedia.org/resource/Philolaus","http://dbpedia.org/resource/Pythagoras","http://dbpedia.org/resource/Socrates"};
+sparql2.filterMode = 0;
+sparql2.predList = {};
+sparql2.objList = {};
+sparql2.classList = {};
+sparql2.format = "KB";
+sparql2.dumpToFile = false;
 
-import("http://dbpedia.openlinksw.com:8890/sparql","SPARQL");
+import("http://dbpedia.openlinksw.com:8890/sparql","SPARQL2");
 
 /**
examples **/ +"http://dbpedia.org/resource/Pythagoras" Modified: trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java 2007-12-02 11:52:02 UTC (rev 296) @@ -41,9 +41,9 @@ import org.dllearner.core.config.StringConfigOption; import org.dllearner.core.config.StringSetConfigOption; import org.dllearner.core.dl.KB; -import org.dllearner.kb.extraction.Manager; -import org.dllearner.kb.extraction.SparqlEndpoint; -import org.dllearner.kb.extraction.SparqlQueryType; +import org.dllearner.kb.sparql.Manager; +import org.dllearner.kb.sparql.SparqlEndpoint; +import org.dllearner.kb.sparql.SparqlQueryType; import org.dllearner.parser.KBParser; import org.dllearner.reasoning.DIGConverter; import org.dllearner.reasoning.JenaOWLDIGConverter; Deleted: trunk/src/dl-learner/org/dllearner/kb/extraction/Configuration.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/extraction/Configuration.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/extraction/Configuration.java 2007-12-02 11:52:02 UTC (rev 296) @@ -1,202 +0,0 @@ -package org.dllearner.kb.extraction; - -import java.io.File; -import java.net.URI; -import java.net.URL; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; - -import org.semanticweb.owl.apibinding.OWLManager; -import org.semanticweb.owl.model.OWLConstant; -import org.semanticweb.owl.model.OWLDataPropertyExpression; -import org.semanticweb.owl.model.OWLIndividual; -import org.semanticweb.owl.model.OWLObjectPropertyExpression; -import org.semanticweb.owl.model.OWLOntology; -import org.semanticweb.owl.model.OWLOntologyManager; - -public class Configuration { - private SparqlEndpoint SparqlEndpoint; - private SparqlQueryType SparqlQueryType; - private Manipulator Manipulator; - - - private Configuration(){ - } - public Configuration(SparqlEndpoint SparqlEndpoint,SparqlQueryType SparqlQueryType){ - this.SparqlEndpoint=SparqlEndpoint; - this.SparqlQueryType=SparqlQueryType; - } - - - public static Configuration getConfiguration(URI uri){ - //public static String getTellsString(URL file, URI kbURI){//throws OWLOntologyCreationException{ - Configuration ret=new Configuration(); - try{ - String file="config/config.owl"; - - - File f= new File(file); - String fileURL="file:///"+f.getAbsolutePath(); - URL u=new URL(fileURL); - /* Load an ontology from a physical URI */ - OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); - OWLOntology ontology = manager.loadOntologyFromPhysicalURI(u.toURI()); - //System.out.println( ontology.containsIndividualReference(uri)); - //OWLIndividualImpl ind=new OWLIndividualImpl(); - //System.out.println(ontology.getReferencedIndividuals()); - Set<OWLIndividual> s=ontology.getReferencedIndividuals(); - //System.out.println(ontology.getReferencedClasses()); - //Set<OWLIndividualAxiom> s= ontology.getIndividualAxioms(); - Iterator<OWLIndividual> it=s.iterator(); - while (it.hasNext()){ - OWLIndividual tmp=(OWLIndividual)it.next(); - //tmp.getURI() - if(tmp.getURI().equals(uri)){ - OWLIndividual[] arr=getIndividualsForProperty("hasSparqlEndpoint",tmp.getObjectPropertyValues(ontology)); - OWLIndividual sEndpoint=arr[0]; - 
ret.SparqlEndpoint=makeEndpoint(sEndpoint, ontology); - arr=getIndividualsForProperty("hasTypedQuery",tmp.getObjectPropertyValues(ontology)); - OWLIndividual typedQuery=arr[0]; - ret.SparqlQueryType=makeSparqlQueryType(typedQuery, ontology); - - } - //{hasSparqlEndpoint=[dbpediaEndpoint]} - } - - ret.Manipulator=makeManipulator(); - }catch (Exception e) {e.printStackTrace();} - - return ret; - } - - - - - - public static OWLIndividual[] getIndividualsForProperty(String propertyname,Map<OWLObjectPropertyExpression, Set<OWLIndividual>> m){ - Set<OWLObjectPropertyExpression> s=m.keySet(); - - Iterator<OWLObjectPropertyExpression> it=s.iterator(); - while (it.hasNext()){ - OWLObjectPropertyExpression tmp=(OWLObjectPropertyExpression)it.next(); - //System.out.println(tmp); - //System.out.println(propertyname); - if(tmp.toString().equals(propertyname)) - { - Object[] arr=((Set<OWLIndividual>)m.get(tmp)).toArray() ; - OWLIndividual[] o=new OWLIndividual[arr.length]; - for (int i = 0; i < o.length; i++) { - o[i]=(OWLIndividual)arr[i]; - } - - return o;} - } - return null; - - } - - public static String getFirstValueForDataProperty(String propertyname,Map<OWLDataPropertyExpression, Set<OWLConstant>> m){ - return getValuesForDataProperty(propertyname, m)[0]; - } - - public static String[] getValuesForDataProperty(String propertyname,Map<OWLDataPropertyExpression, Set<OWLConstant>> m){ - Set<OWLDataPropertyExpression> s=m.keySet(); - - Iterator<OWLDataPropertyExpression> it=s.iterator(); - while (it.hasNext()){ - OWLDataPropertyExpression tmp=(OWLDataPropertyExpression)it.next(); - if(tmp.toString().equals(propertyname)) - { - Object[] arr=((Set<OWLConstant>)m.get(tmp)).toArray() ; - String[] str=new String[arr.length]; - for (int i = 0; i < str.length; i++) { - str[i]=((OWLConstant)arr[i]).getLiteral(); - } - return str;} - } - return null; - - } - - public static SparqlEndpoint makeEndpoint(OWLIndividual sEndpoint,OWLOntology o){ - String host=getFirstValueForDataProperty("hasHost",sEndpoint.getDataPropertyValues(o)); - String port=getFirstValueForDataProperty("hasPort",sEndpoint.getDataPropertyValues(o)); - String hasAfterGET=getFirstValueForDataProperty("hasAfterGET",sEndpoint.getDataPropertyValues(o)); - String hasQueryParameter=getFirstValueForDataProperty("hasQueryParameter",sEndpoint.getDataPropertyValues(o)); - OWLIndividual[] para=getIndividualsForProperty("hasGETParameter",sEndpoint.getObjectPropertyValues(o)); - //System.out.println("test"); - HashMap<String,String> parameters=new HashMap<String,String>(); - if(para==null)return new SparqlEndpoint( host, port, hasAfterGET, hasQueryParameter, parameters); - for (OWLIndividual p : para) { - //System.out.println("test2"); - String a1=getFirstValueForDataProperty("hasParameterName",p.getDataPropertyValues(o)); - String a2=getFirstValueForDataProperty("hasParameterContent",p.getDataPropertyValues(o)); - parameters.put(a1, a2); - } - //System.out.println("test2"); - //System.out.println(host+port+ hasAfterGET+ hasQueryParameter+ parameters); - return new SparqlEndpoint( host, port, hasAfterGET, hasQueryParameter, parameters); - - - - } - - public static SparqlQueryType makeSparqlQueryType(OWLIndividual typedQuery,OWLOntology o){ - String useLiterals=getFirstValueForDataProperty("usesLiterals",typedQuery.getDataPropertyValues(o)); - String hasMode=getFirstValueForDataProperty("hasMode",typedQuery.getDataPropertyValues(o)); - //String hasAfterGET=getValuesForDataProperty("hasAfterGET",sEndpoint.getDataPropertyValues(o)); - //String 
hasQueryParameter=getValuesForDataProperty("hasQueryParameter",sEndpoint.getDataPropertyValues(o)); - OWLIndividual[] objFilter=getIndividualsForProperty("hasObjectFilterSet",typedQuery.getObjectPropertyValues(o)); - OWLIndividual[] predFilter=getIndividualsForProperty("hasPredicateFilterSet",typedQuery.getObjectPropertyValues(o)); - - Set<String> objectFilter=new HashSet<String>(); - Set<String> predicateFilter=new HashSet<String>(); - - for (OWLIndividual of : objFilter) { - String[] tmp=getValuesForDataProperty("filtersURI",of.getDataPropertyValues(o)); - for (String s : tmp){ - objectFilter.add(s); - - } - } - - for (OWLIndividual pf : predFilter) { - String[] tmp=getValuesForDataProperty("filtersURI",pf.getDataPropertyValues(o)); - for (String s : tmp){ - predicateFilter.add(s); - - } - } - //System.out.println(predicateFilter); - //System.out.println(hasMode+objectFilter+predicateFilter+useLiterals); - return new SparqlQueryType(hasMode,objectFilter,predicateFilter,useLiterals); - - - - } - - public static Manipulator makeManipulator() { - return new Manipulator(); - } - - - public Manipulator getManipulator() { - return this.Manipulator; - } - - public SparqlEndpoint getSparqlEndpoint() { - return SparqlEndpoint; - } - - - - public SparqlQueryType getSparqlQueryType() { - return SparqlQueryType; - } - - - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/extraction/ExtractionAlgorithm.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/extraction/ExtractionAlgorithm.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/extraction/ExtractionAlgorithm.java 2007-12-02 11:52:02 UTC (rev 296) @@ -1,74 +0,0 @@ -package org.dllearner.kb.extraction; - -import java.net.URI; -import java.util.Vector; - -import org.dllearner.kb.extraction.datastructures.InstanceNode; -import org.dllearner.kb.extraction.datastructures.Node; -import org.dllearner.kb.extraction.sparql.TypedSparqlQuery; - -public class ExtractionAlgorithm { - - private Configuration Configuration; - private Manipulator Manipulator; - private int recursiondepth=2; - private boolean getAllBackground=true; - - - - public ExtractionAlgorithm (Configuration Configuration){ - this.Configuration=Configuration; - this.Manipulator=Configuration.getManipulator(); - - } - - public Node getFirstNode(URI u){ - return new InstanceNode(u); - } - public Vector<Node> expandAll(URI[] u,TypedSparqlQuery tsp){ - Vector<Node> v=new Vector<Node>(); - for(URI one:u){ - v.add(expandNode(one, tsp)); - } - return v; - } - - - public Node expandNode(URI u, TypedSparqlQuery tsp){ - Node n=getFirstNode(u); - Vector<Node> v=new Vector<Node>(); - v.add(n); - System.out.println("StartVector: "+v); - // n.expand(tsp, this.Manipulator); - //Vector<Node> second= - for(int x=1;x<=this.recursiondepth;x++){ - - Vector<Node>tmp=new Vector<Node>(); - while (v.size()>0) { - Node tmpNode=v.remove(0); - System.out.println("Expanding "+tmpNode); - Vector<Node> tmpVec=tmpNode.expand(tsp, this.Manipulator); - - tmp.addAll(tmpVec); - } - v=tmp; - System.out.println("Rec: "+x+" with "+v); - } - if(this.getAllBackground){ - Vector<Node> classes=new Vector<Node>(); - for(Node one:v){ - if(one.isClass()) {classes.add(one);} - } - while(classes.size()>0){ - System.out.println(classes.size()); - classes.addAll(classes.remove(0).expand(tsp, this.Manipulator)); - } - - } - return n; - - } - - - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/extraction/Manager.java 
=================================================================== --- trunk/src/dl-learner/org/dllearner/kb/extraction/Manager.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/extraction/Manager.java 2007-12-02 11:52:02 UTC (rev 296) @@ -1,67 +0,0 @@ -package org.dllearner.kb.extraction; - -import java.net.URI; -import java.util.HashSet; -import java.util.Set; - -import org.dllearner.kb.extraction.datastructures.Node; -import org.dllearner.kb.extraction.sparql.TypedSparqlQuery; - -public class Manager { - - private Configuration Configuration; - private TypedSparqlQuery TypedSparqlQuery; - private ExtractionAlgorithm ExtractionAlgorithm; - - - public void usePredefinedConfiguration(URI uri){ - - this.Configuration=org.dllearner.kb.extraction.Configuration.getConfiguration(uri); - this.TypedSparqlQuery=new TypedSparqlQuery(Configuration); - this.ExtractionAlgorithm=new ExtractionAlgorithm(Configuration); - } - - public void useConfiguration(SparqlQueryType SparqlQueryType, SparqlEndpoint SparqlEndpoint){ - - this.Configuration=new Configuration(SparqlEndpoint,SparqlQueryType); - this.TypedSparqlQuery=new TypedSparqlQuery(Configuration); - this.ExtractionAlgorithm=new ExtractionAlgorithm(Configuration); - } - - public String extract(URI uri){ - //this.TypedSparqlQuery.query(uri); - //System.out.println(ExtractionAlgorithm.getFirstNode(uri)); - System.out.println("Start extracting"); - Node n=this.ExtractionAlgorithm.expandNode(uri, this.TypedSparqlQuery); - Set<String> s=n.toNTriple(); - String nt=""; - for(String str:s){ - nt+=str+"\n"; - } - return nt; - } - - public String extract(Set<String> instances){ - //this.TypedSparqlQuery.query(uri); - //System.out.println(ExtractionAlgorithm.getFirstNode(uri)); - System.out.println("Start extracting"); - Set<String> ret=new HashSet<String>(); - - - for(String one:instances){ - try{ - Node n=this.ExtractionAlgorithm.expandNode(new URI(one),this.TypedSparqlQuery); - ret.addAll(n.toNTriple()); - }catch (Exception e) {e.printStackTrace();} - } - - - String nt=""; - for(String str:ret){ - nt+=str+"\n"; - } - return nt; - } - - -} \ No newline at end of file Deleted: trunk/src/dl-learner/org/dllearner/kb/extraction/Manipulator.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/extraction/Manipulator.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/extraction/Manipulator.java 2007-12-02 11:52:02 UTC (rev 296) @@ -1,55 +0,0 @@ -package org.dllearner.kb.extraction; - -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; - -import org.dllearner.kb.extraction.datastructures.Node; -import org.dllearner.kb.extraction.datastructures.Tupel; - -public class Manipulator { - public String subclass="http://www.w3.org/2000/01/rdf-schema#subClassOf"; - public String type="http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; - - String objectProperty="http://www.w3.org/2002/07/owl#ObjectProperty"; - String classns="http://www.w3.org/2002/07/owl#Class"; - String thing="http://www.w3.org/2002/07/owl#Thing"; - - Set<String> classproperties; - - - String[] defaultClasses={ - "http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Category:", - "http://dbpedia.org/resource/Template:", - "http://www.w3.org/2004/02/skos/core", - "http://dbpedia.org/class/"}; //TODO FEHLER hier fehlt yago - - public Manipulator(){ - Set<String> classproperties=new HashSet<String>(); - classproperties.add(subclass); - - } - - public 
Set<Tupel> check(Set<Tupel> s,Node node){ - Set<Tupel> toRemove=new HashSet<Tupel>(); - Iterator<Tupel> it=s.iterator(); - while(it.hasNext()){ - Tupel t=(Tupel)it.next(); - //all classes with owl:type class - if(t.a.equals(this.type) && t.b.equals(this.classns)&& node.isClass() ) - {toRemove.add(t);}; - // all with type class - if( t.b.equals(this.classns) && node.isClass() ) - {toRemove.add(t);}; - // all instances with owl:type thing - if(t.a.equals(this.type) && t.b.equals(this.thing)&& node.isInstance() ) - {toRemove.add(t);}; - - } - s.removeAll(toRemove); - - return s; - } - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlEndpoint.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlEndpoint.java 2007-12-02 11:52:02 UTC (rev 296) @@ -1,69 +0,0 @@ -package org.dllearner.kb.extraction; - -import java.util.HashMap; - -public class SparqlEndpoint { - - String host; - int port; - String hasAfterGET; - String hasQueryParameter; - String hasURL; - public HashMap<String,String> parameters=new HashMap<String,String>(); - public SparqlEndpoint(String host, String port, String hasAfterGET, String hasQueryParameter, HashMap<String, String> parameters) { - super(); - this.host = host; - this.port = Integer.parseInt(port); - this.hasAfterGET = hasAfterGET; - this.hasQueryParameter = hasQueryParameter; - this.parameters = parameters; - } - - public SparqlEndpoint(String host, int port, String hasURL, HashMap<String, String> parameters) { - super(); - this.port=port; - this.host=host; - this.hasURL = hasURL; - this.hasQueryParameter = "query"; - this.parameters = parameters; - } - public String getHasAfterGET() { - return hasAfterGET; - } - public void setHasAfterGET(String hasAfterGET) { - this.hasAfterGET = hasAfterGET; - } - public String getHasQueryParameter() { - return hasQueryParameter; - } - public void setHasQueryParameter(String hasQueryParameter) { - this.hasQueryParameter = hasQueryParameter; - } - public String getHost() { - return host; - } - public void setHost(String host) { - this.host = host; - } - public HashMap<String, String> getParameters() { - return parameters; - } - public void setParameters(HashMap<String, String> parameters) { - this.parameters = parameters; - } - public int getPort() { - return port; - } - public void setPort(int port) { - this.port = port; - } - - - - - /*sparql?default-graph-uri=http%3A%2F%2Fdbpedia.org&query=" + - //"SELECT%20%2A%20WHERE%20%7B%20%3Chttp%3A%2F%2Fdbpedia.org%2Fresource%2FAristotle%3E%20%3Fa%20%3Fb%20%7D%20" + - URLEncoder.encode(query, "UTF-8")+ - //query+// URLencode - "&format=application%2Fsparql-results%2Bxml*/ -} Deleted: trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlQueryType.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlQueryType.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlQueryType.java 2007-12-02 11:52:02 UTC (rev 296) @@ -1,68 +0,0 @@ -package org.dllearner.kb.extraction; - -import java.util.Set; - -public class SparqlQueryType { - - private String mode="forbid"; - private String[] objectfilterlist={ - "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/resource/Category", - 
"http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons"}; - private String[] predicatefilterlist={ - "http://www.w3.org/2004/02/skos/core", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference" }; - private boolean literals=false; - - public SparqlQueryType(String mode, String[] obectfilterlist, String[] predicatefilterlist, boolean literals) { - super(); - this.mode = mode; - this.objectfilterlist = obectfilterlist; - this.predicatefilterlist = predicatefilterlist; - this.literals = literals; - } - - public SparqlQueryType(String mode, Set<String> objectfilterlist, Set<String> predicatefilterlist, String literals) { - super(); - this.mode = mode; - this.literals = (literals.equals("true"))?true:false; - - Object[] arr=objectfilterlist.toArray(); - Object[] arr2=predicatefilterlist.toArray(); - this.objectfilterlist = new String[arr.length]; - this.predicatefilterlist = new String[arr2.length]; - for (int i = 0; i < arr.length; i++) { - this.objectfilterlist[i]=(String)arr[i]; - } - for (int i = 0; i < arr2.length; i++) { - this.predicatefilterlist[i]=(String)arr2[i]; - } - - - } - - public boolean isLiterals() { - return literals; - } - - public String getMode() { - return mode; - } - - public String[] getObjectfilterlist() { - return objectfilterlist; - } - - public String[] getPredicatefilterlist() { - return predicatefilterlist; - } - - - -} Deleted: trunk/src/dl-learner/org/dllearner/kb/extraction/Test.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/extraction/Test.java 2007-12-02 11:42:36 UTC (rev 295) +++ trunk/src/dl-learner/org/dllearner/kb/extraction/Test.java 2007-12-02 11:52:02 UTC (rev 296) @@ -1,31 +0,0 @@ -package org.dllearner.kb.extraction; - -import java.io.File; -import java.io.FileWriter; -import java.net.URI; - -public class Test { - - - public static void main(String[] args) { - System.out.println("Start"); - String test2="http://www.extraction.org/config#dbpediatest"; - String test="http://www.extraction.org/config#localjoseki"; - try{ - URI u=new URI(test); - Manager m=new Manager(); - m.usePredefinedConfiguration(u); - - - URI u2=new URI("http://dbpedia.org/resource/Angela_Merkel"); - - String filename=System.currentTimeMillis()+".nt"; - FileWriter fw=new FileWriter(new File(filename),true); - fw.write(m.extract(u2)); - fw.flush(); - fw.close(); - - }catch (Exception e) {e.printStackTrace();} - } - -} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/sparql/Cache.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Cache.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,132 @@ +package org.dllearner.kb.sparql; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.net.URLEncoder; + +public class Cache implements Serializable { + // Object can be the cache itself + // or a cache object(one entry) + + final static long serialVersionUID = 104; + transient String basedir = ""; + transient String fileending = ".cache"; + long timestamp; + String content = ""; + long daysoffreshness = 15; + long multiplier = 
24 * 60 * 60 * 1000;// h m s ms + String sparqlquery = ""; + + // constructor for the cache itself + public Cache(String path) { + this.basedir = path + File.separator; + if (!new File(path).exists()) { + System.out.println(new File(path).mkdir()); + ; + } + + } + + // constructor for single cache object(one entry) + public Cache(String c, String sparql) { + this.content = c; + this.sparqlquery = sparql; + this.timestamp = System.currentTimeMillis(); + } + + public String get(String key, String sparql) { + // System.out.println("get From "+key); + String ret = null; + try { + Cache c = readFromFile(makeFilename(key)); + if (c == null) + return null; + // System.out.println(" file found"); + if (!c.checkFreshness()) + return null; + // System.out.println("fresh"); + if (!c.validate(sparql)) + return null; + // System.out.println("valid"); + ret = c.content; + } catch (Exception e) { + e.printStackTrace(); + } + return ret; + }; + + public void put(String key, String content, String sparql) { + // System.out.println("put into "+key); + Cache c = new Cache(content, sparql); + putIntoFile(makeFilename(key), c); + } + + String makeFilename(String key) { + String ret = ""; + try { + ret = basedir + URLEncoder.encode(key, "UTF-8") + fileending; + } catch (Exception e) { + e.printStackTrace(); + } + return ret; + } + + boolean checkFreshness() { + if ((System.currentTimeMillis() - this.timestamp) <= (daysoffreshness * multiplier)) + // fresh + return true; + else + return false; + } + + boolean validate(String sparql) { + if (this.sparqlquery.equals(sparql)) + // valid + return true; + else + return false; + } + + public void checkFile(String Filename) { + if (!new File(Filename).exists()) { + try { + new File(Filename).createNewFile(); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + } + + public void putIntoFile(String Filename, Cache content) { + try { + // FileWriter fw=new FileWriter(new File(Filename),true); + FileOutputStream fos = new FileOutputStream(Filename, false); + ObjectOutputStream o = new ObjectOutputStream(fos); + o.writeObject(content); + fos.flush(); + fos.close(); + } catch (Exception e) { + System.out.println("Not in cache creating: " + Filename); + } + } + + public Cache readFromFile(String Filename) { + Cache content = null; + try { + FileInputStream fos = new FileInputStream(Filename); + ObjectInputStream o = new ObjectInputStream(fos); + content = (Cache) o.readObject(); + // FileReader fr=new FileReader(new File(Filename,"r")); + // BufferedReader br=new BufferedReader(fr); + } catch (Exception e) { + } + return content; + + } +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/datastructures/ClassNode.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ClassNode.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,61 @@ +package org.dllearner.kb.sparql; + +import java.net.URI; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; +import java.util.Vector; + +public class ClassNode extends Node { + Set<PropertyNode> properties = new HashSet<PropertyNode>(); + + public ClassNode(URI u) { + super(u); + this.type = "class"; + } + + @Override + public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { + Set<Tupel> s = tsq.query(this.URI); + s = m.check(s, this); + Vector<Node> Nodes = new Vector<Node>(); + // 
Manipulation + + Iterator<Tupel> it = s.iterator(); + while (it.hasNext()) { + Tupel t = (Tupel) it.next(); + try { + if (t.a.equals(m.type) || t.a.equals(m.subclass)) { + ClassNode tmp = new ClassNode(new URI(t.b)); + properties.add(new PropertyNode(new URI(m.subclass), this, tmp)); + Nodes.add(tmp); + } + } catch (Exception e) { + System.out.println(t); + e.printStackTrace(); + } + + } + return Nodes; + } + + @Override + public boolean isClass() { + return true; + } + + @Override + public Set<String> toNTriple() { + Set<String> s = new HashSet<String>(); + s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + + "http://www.w3.org/2002/07/owl#Class" + ">."); + + for (PropertyNode one : properties) { + s.add("<" + this.URI + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); + s.addAll(one.getB().toNTriple()); + } + + return s; + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/Configuration.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Configuration.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,207 @@ +package org.dllearner.kb.sparql; + +import java.io.File; +import java.net.URI; +import java.net.URL; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +import org.semanticweb.owl.apibinding.OWLManager; +import org.semanticweb.owl.model.OWLConstant; +import org.semanticweb.owl.model.OWLDataPropertyExpression; +import org.semanticweb.owl.model.OWLIndividual; +import org.semanticweb.owl.model.OWLObjectPropertyExpression; +import org.semanticweb.owl.model.OWLOntology; +import org.semanticweb.owl.model.OWLOntologyManager; + +public class Configuration { + private SparqlEndpoint SparqlEndpoint; + private SparqlQueryType SparqlQueryType; + private Manipulator Manipulator; + + private Configuration() { + } + + public Configuration(SparqlEndpoint SparqlEndpoint, SparqlQueryType SparqlQueryType) { + this.SparqlEndpoint = SparqlEndpoint; + this.SparqlQueryType = SparqlQueryType; + } + + public static Configuration getConfiguration(URI uri) { + // public static String getTellsString(URL file, URI kbURI){//throws + // OWLOntologyCreationException{ + Configuration ret = new Configuration(); + try { + String file = "config/config.owl"; + + File f = new File(file); + String fileURL = "file:///" + f.getAbsolutePath(); + URL u = new URL(fileURL); + /* Load an ontology from a physical URI */ + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLOntology ontology = manager.loadOntologyFromPhysicalURI(u.toURI()); + // System.out.println( ontology.containsIndividualReference(uri)); + // OWLIndividualImpl ind=new OWLIndividualImpl(); + // System.out.println(ontology.getReferencedIndividuals()); + Set<OWLIndividual> s = ontology.getReferencedIndividuals(); + // System.out.println(ontology.getReferencedClasses()); + // Set<OWLIndividualAxiom> s= ontology.getIndividualAxioms(); + Iterator<OWLIndividual> it = s.iterator(); + while (it.hasNext()) { + OWLIndividual tmp = (OWLIndividual) it.next(); + // tmp.getURI() + if (tmp.getURI().equals(uri)) { + OWLIndividual[] arr = getIndividualsForProperty("hasSparqlEndpoint", tmp + .getObjectPropertyValues(ontology)); + OWLIndividual sEndpoint = arr[0]; + ret.SparqlEndpoint = 
makeEndpoint(sEndpoint, ontology); + arr = getIndividualsForProperty("hasTypedQuery", tmp + .getObjectPropertyValues(ontology)); + OWLIndividual typedQuery = arr[0]; + ret.SparqlQueryType = makeSparqlQueryType(typedQuery, ontology); + + } + // {hasSparqlEndpoint=[dbpediaEndpoint]} + } + + ret.Manipulator = makeManipulator(); + } catch (Exception e) { + e.printStackTrace(); + } + + return ret; + } + + public static OWLIndividual[] getIndividualsForProperty(String propertyname, + Map<OWLObjectPropertyExpression, Set<OWLIndividual>> m) { + Set<OWLObjectPropertyExpression> s = m.keySet(); + + Iterator<OWLObjectPropertyExpression> it = s.iterator(); + while (it.hasNext()) { + OWLObjectPropertyExpression tmp = (OWLObjectPropertyExpression) it.next(); + // System.out.println(tmp); + // System.out.println(propertyname); + if (tmp.toString().equals(propertyname)) { + Object[] arr = ((Set<OWLIndividual>) m.get(tmp)).toArray(); + OWLIndividual[] o = new OWLIndividual[arr.length]; + for (int i = 0; i < o.length; i++) { + o[i] = (OWLIndividual) arr[i]; + } + + return o; + } + } + return null; + + } + + public static String getFirstValueForDataProperty(String propertyname, + Map<OWLDataPropertyExpression, Set<OWLConstant>> m) { + return getValuesForDataProperty(propertyname, m)[0]; + } + + public static String[] getValuesForDataProperty(String propertyname, + Map<OWLDataPropertyExpression, Set<OWLConstant>> m) { + Set<OWLDataPropertyExpression> s = m.keySet(); + + Iterator<OWLDataPropertyExpression> it = s.iterator(); + while (it.hasNext()) { + OWLDataPropertyExpression tmp = (OWLDataPropertyExpression) it.next(); + if (tmp.toString().equals(propertyname)) { + Object[] arr = ((Set<OWLConstant>) m.get(tmp)).toArray(); + String[] str = new String[arr.length]; + for (int i = 0; i < str.length; i++) { + str[i] = ((OWLConstant) arr[i]).getLiteral(); + } + return str; + } + } + return null; + + } + + public static SparqlEndpoint makeEndpoint(OWLIndividual sEndpoint, OWLOntology o) { + String host = getFirstValueForDataProperty("hasHost", sEndpoint.getDataPropertyValues(o)); + String port = getFirstValueForDataProperty("hasPort", sEndpoint.getDataPropertyValues(o)); + String hasAfterGET = getFirstValueForDataProperty("hasAfterGET", sEndpoint + .getDataPropertyValues(o)); + String hasQueryParameter = getFirstValueForDataProperty("hasQueryParameter", sEndpoint + .getDataPropertyValues(o)); + OWLIndividual[] para = getIndividualsForProperty("hasGETParameter", sEndpoint + .getObjectPropertyValues(o)); + // System.out.println("test"); + HashMap<String, String> parameters = new HashMap<String, String>(); + if (para == null) + return new SparqlEndpoint(host, port, hasAfterGET, hasQueryParameter, parameters); + for (OWLIndividual p : para) { + // System.out.println("test2"); + String a1 = getFirstValueForDataProperty("hasParameterName", p.getDataPropertyValues(o)); + String a2 = getFirstValueForDataProperty("hasParameterContent", p + .getDataPropertyValues(o)); + parameters.put(a1, a2); + } + // System.out.println("test2"); + // System.out.println(host+port+ hasAfterGET+ hasQueryParameter+ + // parameters); + return new SparqlEndpoint(host, port, hasAfterGET, hasQueryParameter, parameters); + + } + + public static SparqlQueryType makeSparqlQueryType(OWLIndividual typedQuery, OWLOntology o) { + String useLiterals = getFirstValueForDataProperty("usesLiterals", typedQuery + .getDataPropertyValues(o)); + String hasMode = getFirstValueForDataProperty("hasMode", typedQuery + .getDataPropertyValues(o)); + // String + // 
hasAfterGET=getValuesForDataProperty("hasAfterGET",sEndpoint.getDataPropertyValues(o)); + // String + // hasQueryParameter=getValuesForDataProperty("hasQueryParameter",sEndpoint.getDataPropertyValues(o)); + OWLIndividual[] objFilter = getIndividualsForProperty("hasObjectFilterSet", typedQuery + .getObjectPropertyValues(o)); + OWLIndividual[] predFilter = getIndividualsForProperty("hasPredicateFilterSet", typedQuery + .getObjectPropertyValues(o)); + + Set<String> objectFilter = new HashSet<String>(); + Set<String> predicateFilter = new HashSet<String>(); + + for (OWLIndividual of : objFilter) { + String[] tmp = getValuesForDataProperty("filtersURI", of.getDataPropertyValues(o)); + for (String s : tmp) { + objectFilter.add(s); + + } + } + + for (OWLIndividual pf : predFilter) { + String[] tmp = getValuesForDataProperty("filtersURI", pf.getDataPropertyValues(o)); + for (String s : tmp) { + predicateFilter.add(s); + + } + } + // System.out.println(predicateFilter); + // System.out.println(hasMode+objectFilter+predicateFilter+useLiterals); + return new SparqlQueryType(hasMode, objectFilter, predicateFilter, useLiterals); + + } + + public static Manipulator makeManipulator() { + return new Manipulator(); + } + + public Manipulator getManipulator() { + return this.Manipulator; + } + + public SparqlEndpoint getSparqlEndpoint() { + return SparqlEndpoint; + } + + public SparqlQueryType getSparqlQueryType() { + return SparqlQueryType; + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/ExtractionAlgorithm.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/ExtractionAlgorithm.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,68 @@ +package org.dllearner.kb.sparql; + +import java.net.URI; +import java.util.Vector; + +public class ExtractionAlgorithm { + + private Configuration Configuration; + private Manipulator Manipulator; + private int recursiondepth = 2; + private boolean getAllBackground = true; + + public ExtractionAlgorithm(Configuration Configuration) { + this.Configuration = Configuration; + this.Manipulator = Configuration.getManipulator(); + + } + + public Node getFirstNode(URI u) { + return new InstanceNode(u); + } + + public Vector<Node> expandAll(URI[] u, TypedSparqlQuery tsp) { + Vector<Node> v = new Vector<Node>(); + for (URI one : u) { + v.add(expandNode(one, tsp)); + } + return v; + } + + public Node expandNode(URI u, TypedSparqlQuery tsp) { + Node n = getFirstNode(u); + Vector<Node> v = new Vector<Node>(); + v.add(n); + System.out.println("StartVector: " + v); + // n.expand(tsp, this.Manipulator); + // Vector<Node> second= + for (int x = 1; x <= this.recursiondepth; x++) { + + Vector<Node> tmp = new Vector<Node>(); + while (v.size() > 0) { + Node tmpNode = v.remove(0); + System.out.println("Expanding " + tmpNode); + Vector<Node> tmpVec = tmpNode.expand(tsp, this.Manipulator); + + tmp.addAll(tmpVec); + } + v = tmp; + System.out.println("Rec: " + x + " with " + v); + } + if (this.getAllBackground) { + Vector<Node> classes = new Vector<Node>(); + for (Node one : v) { + if (one.isClass()) { + classes.add(one); + } + } + while (classes.size() > 0) { + System.out.println(classes.size()); + classes.addAll(classes.remove(0).expand(tsp, this.Manipulator)); + } + + } + return n; + + } + +} Copied: 
trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/datastructures/InstanceNode.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/InstanceNode.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,78 @@ +package org.dllearner.kb.sparql; + +import java.net.URI; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; +import java.util.Vector; + +public class InstanceNode extends Node { + + Set<ClassNode> classes = new HashSet<ClassNode>(); + Set<Tupel> datatypes = new HashSet<Tupel>(); + Set<PropertyNode> properties = new HashSet<PropertyNode>(); + + public InstanceNode(URI u) { + super(u); + this.type = "instance"; + + } + + @Override + public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { + + Set<Tupel> s = tsq.query(this.URI); + // Manipulation + m.check(s, this); + Vector<Node> Nodes = new Vector<Node>(); + + Iterator<Tupel> it = s.iterator(); + while (it.hasNext()) { + Tupel t = (Tupel) it.next(); + + try { + if (t.a.equals(m.type)) { + ClassNode tmp = new ClassNode(new URI(t.b)); + classes.add(tmp); + Nodes.add(tmp); + } else { + InstanceNode tmp = new InstanceNode(new URI(t.b)); + properties.add(new PropertyNode(new URI(t.a), this, tmp)); + Nodes.add(tmp); + + } + } catch (Exception e) { + System.out.println("Problem with: " + t); + e.printStackTrace(); + } + + } + this.expanded = true; + return Nodes; + } + + @Override + public boolean isInstance() { + return true; + } + + @Override + public Set<String> toNTriple() { + Set<String> s = new HashSet<String>(); + s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + + "http://www.w3.org/2002/07/owl#Thing" + ">."); + for (ClassNode one : classes) { + s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + + one.getURI() + ">."); + s.addAll(one.toNTriple()); + } + for (PropertyNode one : properties) { + s.add("<" + this.URI + "><" + one.getURI() + "><" + one.getB().getURI() + ">."); + s.addAll(one.toNTriple()); + s.addAll(one.getB().toNTriple()); + } + + return s; + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/Manager.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,62 @@ +package org.dllearner.kb.sparql; + +import java.net.URI; +import java.util.HashSet; +import java.util.Set; + +public class Manager { + + private Configuration Configuration; + private TypedSparqlQuery TypedSparqlQuery; + private ExtractionAlgorithm ExtractionAlgorithm; + + public void usePredefinedConfiguration(URI uri) { + + this.Configuration = org.dllearner.kb.sparql.Configuration.getConfiguration(uri); + this.TypedSparqlQuery = new TypedSparqlQuery(Configuration); + this.ExtractionAlgorithm = new ExtractionAlgorithm(Configuration); + } + + public void useConfiguration(SparqlQueryType SparqlQueryType, SparqlEndpoint SparqlEndpoint) { + + this.Configuration = new Configuration(SparqlEndpoint, SparqlQueryType); + this.TypedSparqlQuery = new TypedSparqlQuery(Configuration); + this.ExtractionAlgorithm = new ExtractionAlgorithm(Configuration); + } + + public String 
extract(URI uri) { + // this.TypedSparqlQuery.query(uri); + // System.out.println(ExtractionAlgorithm.getFirstNode(uri)); + System.out.println("Start extracting"); + Node n = this.ExtractionAlgorithm.expandNode(uri, this.TypedSparqlQuery); + Set<String> s = n.toNTriple(); + String nt = ""; + for (String str : s) { + nt += str + "\n"; + } + return nt; + } + + public String extract(Set<String> instances) { + // this.TypedSparqlQuery.query(uri); + // System.out.println(ExtractionAlgorithm.getFirstNode(uri)); + System.out.println("Start extracting"); + Set<String> ret = new HashSet<String>(); + + for (String one : instances) { + try { + Node n = this.ExtractionAlgorithm.expandNode(new URI(one), this.TypedSparqlQuery); + ret.addAll(n.toNTriple()); + } catch (Exception e) { + e.printStackTrace(); + } + } + + String nt = ""; + for (String str : ret) { + nt += str + "\n"; + } + return nt; + } + +} \ No newline at end of file Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/Manipulator.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manipulator.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,58 @@ +package org.dllearner.kb.sparql; + +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; + +public class Manipulator { + public String subclass = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; + public String type = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; + + String objectProperty = "http://www.w3.org/2002/07/owl#ObjectProperty"; + String classns = "http://www.w3.org/2002/07/owl#Class"; + String thing = "http://www.w3.org/2002/07/owl#Thing"; + + Set<String> classproperties; + + String[] defaultClasses = { "http://dbpedia.org/class/yago", + "http://dbpedia.org/resource/Category:", "http://dbpedia.org/resource/Template:", + "http://www.w3.org/2004/02/skos/core", "http://dbpedia.org/class/" }; // TODO + // FEHLER + // hier + // fehlt + // yago + + public Manipulator() { + Set<String> classproperties = new HashSet<String>(); + classproperties.add(subclass); + + } + + public Set<Tupel> check(Set<Tupel> s, Node node) { + Set<Tupel> toRemove = new HashSet<Tupel>(); + Iterator<Tupel> it = s.iterator(); + while (it.hasNext()) { + Tupel t = (Tupel) it.next(); + // all classes with owl:type class + if (t.a.equals(this.type) && t.b.equals(this.classns) && node.isClass()) { + toRemove.add(t); + } + ; + // all with type class + if (t.b.equals(this.classns) && node.isClass()) { + toRemove.add(t); + } + ; + // all instances with owl:type thing + if (t.a.equals(this.type) && t.b.equals(this.thing) && node.isInstance()) { + toRemove.add(t); + } + ; + + } + s.removeAll(toRemove); + + return s; + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/datastructures/Node.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Node.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,54 @@ +package org.dllearner.kb.sparql; + +import java.net.URI; +import java.util.Set; +import java.util.Vector; + +public abstract class Node { + URI URI; + protected String type; + protected boolean expanded = false; + + // Hashtable<String,Node> classes=new 
Hashtable<String,Node>(); + // Hashtable<String,Node> instances=new Hashtable<String,Node>();; + // Hashtable<String,Node> datatype=new Hashtable<String,Node>();; + + public Node(URI u) { + this.URI = u; + + } + + /* + * public void checkConsistency(){ if (type.equals("class") && ( + * instances.size()>0 || datatype.size()>0)){ System.out.println("Warning, + * inconsistent:"+this.toString()); } + * } + */ + + public abstract Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m); + + public abstract Set<String> toNTriple(); + + @Override + public String toString() { + return "Node: " + URI + ":" + type; + + } + + public boolean isClass() { + return false; + } + + public boolean isInstance() { + return false; + } + + public boolean isProperty() { + return false; + } + + public URI getURI() { + return URI; + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/datastructures/PropertyNode.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/PropertyNode.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,74 @@ +package org.dllearner.kb.sparql; + +import java.net.URI; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; +import java.util.Vector; + +public class PropertyNode extends Node { + + private Node a; + private Node b; + private Set<String> SpecialTypes; + + public PropertyNode(URI u) { + super(u); + this.type = "property"; + + } + + public PropertyNode(URI u, Node a, Node b) { + super(u); + this.type = "property"; + this.a = a; + this.b = b; + this.SpecialTypes = new HashSet<String>(); + } + + @Override + public Vector<Node> expand(TypedSparqlQuery tsq, Manipulator m) { + Set<Tupel> s = tsq.query(this.URI); + Vector<Node> Nodes = new Vector<Node>(); + // Manipulation + + Iterator<Tupel> it = s.iterator(); + while (it.hasNext()) { + Tupel t = (Tupel) it.next(); + try { + if (t.a.equals(m.type)) { + SpecialTypes.add(t.b); + } + } catch (Exception e) { + System.out.println(t); + e.printStackTrace(); + } + + } + return Nodes; + } + + @Override + public boolean isProperty() { + return true; + } + + public Node getB() { + return this.b; + } + + @Override + public Set<String> toNTriple() { + Set<String> s = new HashSet<String>(); + s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + + "http://www.w3.org/2002/07/owl#ObjectProperty" + ">."); + for (String one : SpecialTypes) { + s.add("<" + this.URI + "><" + "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + "><" + + one + ">."); + + } + + return s; + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/SimpleHTTPRequest.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/sparql/SimpleHTTPRequest.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SimpleHTTPRequest.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SimpleHTTPRequest.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,128 @@ +package org.dllearner.kb.sparql; + +import java.io.BufferedInputStream; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.net.HttpURLConnection; +import java.net.InetAddress; +import java.net.Socket; +import 
java.net.URL; +import java.net.URLEncoder; + +public class SimpleHTTPRequest { + static final char value[] = { 13, 10 }; + static final String cut = new String(value); + private InetAddress ia; + private int port; + + public SimpleHTTPRequest(InetAddress ia, int port) { + super(); + this.ia = ia; + this.port = port; + + } + + public String sendAndReceive(String content) { + String retval = ""; + // + + byte resp[] = null; + + try { + Socket SparqlServer = new Socket(this.ia, this.port); + // String request=makeHeader(content); + // send request + (SparqlServer.getOutputStream()).write(content.getBytes()); + + // get Response + resp = readBuffer(new BufferedInputStream(SparqlServer.getInputStream())); + retval = new String(resp); + retval = subtractResponseHeader(retval); + // retval="||"+retval; + + SparqlServer.close(); + + } catch (Exception e) { + e.printStackTrace(); + } + // System.out.println("got it"); + return retval; + + }// down + + public static byte[] readBuffer(InputStream IS) throws IOException { + byte buffer[] = new byte[0xffff]; + int nbytes = 0; + byte resp[] = new byte[0]; + while ((nbytes = IS.read(buffer)) != -1) { + byte tmp[] = new byte[resp.length + nbytes]; + int i = 0; + for (; i < resp.length; i++) { + tmp[i] = resp[i]; + } + for (int a = 0; a < nbytes; a++, i++) { + tmp[i] = buffer[a]; + } + resp = tmp; + } + return resp; + } + + public String subtractResponseHeader(String in) { + // System.out.println(in.indexOf(cut+""+cut)); + return in.substring(in.indexOf(cut + "" + cut) + 4); + + } + + private String sendAndReceive2(String sparql, URL url) throws IOException { + StringBuilder answer = new StringBuilder(); + + // String an Sparql-Endpoint schicken + HttpURLConnection connection; + + connection = (HttpURLConnection) url.openConnection(); + connection.setDoOutput(true); + + connection.addRequestProperty("Host", "dbpedia.openlinksw.com"); + connection.addRequestProperty("Connection", "close"); + connection + .addRequestProperty( + "Accept", + "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); + connection.addRequestProperty("Accept-Language", "de-de,de;q=0.8,en-us;q=0.5,en;q=0.3"); + connection.addRequestProperty("Accept-Charset", "utf-8;q=1.0"); + connection + .addRequestProperty( + "User-Agent", + "Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8.1.4) Gecko/20070515 Firefox/2.0.0.4 Web-Sniffer/1.0.24"); + + OutputStream os = connection.getOutputStream(); + OutputStreamWriter osw = new OutputStreamWriter(os); + osw + .write("default-graph-uri=http%3A%2F%2Fdbpedia.org&query=" + + URLEncoder.encode(sparql, "UTF-8") + + "&format=application%2Fsparql-results%2Bxml"); + osw.close(); + + // receive answer + InputStream is = connection.getInputStream(); + InputStreamReader isr = new InputStreamReader(is, "UTF-8"); + BufferedReader br = new BufferedReader(isr); + + String line; + do { + line = br.readLine(); + if (line != null) + answer.append(line); + } while (line != null); + + br.close(); + + return answer.toString(); + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/SparqlEndpoint.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,79 @@ +package org.dllearner.kb.sparql; + +import java.util.HashMap; + 
+public class SparqlEndpoint { + + String host; + int port; + String hasAfterGET; + String hasQueryParameter; + String hasURL; + public HashMap<String, String> parameters = new HashMap<String, String>(); + + public SparqlEndpoint(String host, String port, String hasAfterGET, String hasQueryParameter, + HashMap<String, String> parameters) { + super(); + this.host = host; + this.port = Integer.parseInt(port); + this.hasAfterGET = hasAfterGET; + this.hasQueryParameter = hasQueryParameter; + this.parameters = parameters; + } + + public SparqlEndpoint(String host, int port, String hasURL, HashMap<String, String> parameters) { + super(); + this.port = port; + this.host = host; + this.hasURL = hasURL; + this.hasQueryParameter = "query"; + this.parameters = parameters; + } + + public String getHasAfterGET() { + return hasAfterGET; + } + + public void setHasAfterGET(String hasAfterGET) { + this.hasAfterGET = hasAfterGET; + } + + public String getHasQueryParameter() { + return hasQueryParameter; + } + + public void setHasQueryParameter(String hasQueryParameter) { + this.hasQueryParameter = hasQueryParameter; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public HashMap<String, String> getParameters() { + return parameters; + } + + public void setParameters(HashMap<String, String> parameters) { + this.parameters = parameters; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + /* + * sparql?default-graph-uri=http%3A%2F%2Fdbpedia.org&query=" + + * //"SELECT%20%2A%20WHERE%20%7B%20%3Chttp%3A%2F%2Fdbpedia.org%2Fresource%2FAristotle%3E%20%3Fa%20%3Fb%20%7D%20" + + * URLEncoder.encode(query, "UTF-8")+ //query+// URLencode + * "&format=application%2Fsparql-results%2Bxml + */ +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/sparql/SparqlHTTPRequest.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlHTTPRequest.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,81 @@ +package org.dllearner.kb.sparql; + +import java.net.InetAddress; +import java.net.URLEncoder; +import java.util.Iterator; +import java.util.Set; + +public class SparqlHTTPRequest { + static final char value[] = { 13, 10 }; + static final String cut = new String(value); + + private SparqlEndpoint SparqlEndpoint; + private SimpleHTTPRequest SimpleHTTPRequest; + + public SparqlHTTPRequest(SparqlEndpoint SparqlEndpoint) { + this.SparqlEndpoint = SparqlEndpoint; + InetAddress ia = null; + try { + ia = InetAddress.getByName(SparqlEndpoint.getHost()); + } catch (Exception e) { + e.printStackTrace(); + } + this.SimpleHTTPRequest = new SimpleHTTPRequest(ia, SparqlEndpoint.getPort()); + + } + + public String sendAndReceiveSPARQL(String sparql) { + + // System.out.println(sparql); + String content = makeContent(sparql); + // System.out.println(content); + String ret = this.SimpleHTTPRequest.sendAndReceive(content); + // System.out.println(ret); + + // this.sendAndReceiveSPARQL("SELECT * WHERE {?a ?b ?c} LIMIT 10"); + + return ret; + + }// down + + public String makeContent(String query) { + + String RequestHeader = ""; + try { + + RequestHeader = "GET "; + RequestHeader += SparqlEndpoint.getHasAfterGET() + "?"; + // parameters + Set<String> s = 
SparqlEndpoint.getParameters().keySet(); + Iterator<String> it = s.iterator(); + while (it.hasNext()) { + String element = (String) it.next(); + RequestHeader += "" + URLEncoder.encode(element, "UTF-8") + "=" + + URLEncoder.encode(SparqlEndpoint.getParameters().get(element), "UTF-8") + + "&"; + } + RequestHeader += "" + SparqlEndpoint.getHasQueryParameter() + "=" + + URLEncoder.encode(query, "UTF-8"); + RequestHeader += " HTTP/1.1" + cut; + RequestHeader += "Host: " + SparqlEndpoint.getHost() + cut; + + RequestHeader += "Connection: close" + + cut + + + // "Accept-Encoding: gzip"+cut+ + "Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5" + + cut + + "Accept-Language: de-de,de;q=0.8,en-us;q=0.5,en;q=0.3" + + cut + + "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7" + + cut + + "User-Agent: Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8.1.4) Gecko/20070515 Firefox/2.0.0.4 Web-Sniffer/1.0.24" + + cut + cut; + } catch (Exception e) { + e.printStackTrace(); + } + return RequestHeader; + + } + +} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java (from rev 294, trunk/src/dl-learner/org/dllearner/kb/extraction/sparql/SparqlQueryMaker.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java 2007-12-02 11:52:02 UTC (rev 296) @@ -0,0 +1,37 @@ +package org.dllearner.kb.sparql; + +public class SparqlQueryMaker { + + private SparqlQueryType SparqlQueryType; + + public SparqlQueryMaker(SparqlQueryType SparqlQueryType) { + this.SparqlQueryType = SparqlQu... [truncated message content] |
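
For readers following the refactoring, here is a minimal sketch (not part of the commit) of how the relocated org.dllearner.kb.sparql classes could be wired together, using only the constructors and methods visible in this revision (Manager.useConfiguration/extract, the String-port SparqlEndpoint constructor, and the array-based SparqlQueryType constructor). The host, port and target resource mirror the DBpedia values in examples/dbpedia/sparql.conf; the GET path and parameters are assumptions, not values taken from the commit.

// Hedged sketch: wiring the new org.dllearner.kb.sparql classes together.
// Endpoint path and GET parameters are assumed, not taken from this revision.
import java.net.URI;
import java.util.HashMap;

import org.dllearner.kb.sparql.Manager;
import org.dllearner.kb.sparql.SparqlEndpoint;
import org.dllearner.kb.sparql.SparqlQueryType;

public class SparqlExtractionSketch {

	public static void main(String[] args) throws Exception {
		// GET parameters appended by SparqlHTTPRequest.makeContent(); assumed values
		HashMap<String, String> parameters = new HashMap<String, String>();
		parameters.put("default-graph-uri", "http://dbpedia.org");
		parameters.put("format", "application/sparql-results+xml");

		// String-port constructor, as also used by Configuration.makeEndpoint()
		SparqlEndpoint endpoint = new SparqlEndpoint("dbpedia.openlinksw.com", "8890",
				"/sparql", "query", parameters);

		// "forbid" mode, empty filter lists, no literals
		SparqlQueryType queryType = new SparqlQueryType("forbid", new String[0],
				new String[0], false);

		// Manager builds Configuration, TypedSparqlQuery and ExtractionAlgorithm internally
		Manager manager = new Manager();
		manager.useConfiguration(queryType, endpoint);

		// extract(URI) expands the instance node recursively and returns N-Triples
		String nTriples = manager.extract(new URI("http://dbpedia.org/resource/Pythagoras"));
		System.out.println(nTriples);
	}
}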