[graphl-cvs] graphl/src/org/mediavirus/graphl/graph/rdf RDFGraphReader.java RDFGraph.java RDFNode.java
From: Flo L. <fl...@us...> - 2006-06-08 13:15:40
Update of /cvsroot/graphl/graphl/src/org/mediavirus/graphl/graph/rdf
In directory sc8-pr-cvs2.sourceforge.net:/tmp/cvs-serv17343/src/org/mediavirus/graphl/graph/rdf

Modified Files:
	RDFGraph.java RDFNode.java
Added Files:
	RDFGraphReader.java
Log Message:
a lot of changes done a long time ago... changelog has to be updated another time :(

--- NEW FILE: RDFGraphReader.java ---
/*
 * Created on 20.12.2005
 */
package org.mediavirus.graphl.graph.rdf;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.mediavirus.graphl.GraphlRegistry;
import org.mediavirus.graphl.graph.Edge;
import org.mediavirus.graphl.graph.Node;
import org.mediavirus.graphl.vocabulary.NS;
import org.mediavirus.util.ParseUtils;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import edu.unika.aifb.rdf.api.syntax.RDFConsumer;
import edu.unika.aifb.rdf.api.syntax.RDFParser;

/**
 * Reads an RDF document from a URL into an RDFGraph, following owl:imports
 * and rdfs:seeAlso links to other documents.
 */
public class RDFGraphReader implements RDFConsumer {

    RDFGraph graph;
    URL url;
    Node sourceNode;

    boolean reload = false;
    boolean loading = false;

    List<Node> nodesToRemove;
    List<Edge> edgesToRemove;
    List<URL> loadedURLs;

    int loadCount;
    float loadAlpha;

    public RDFGraphReader(RDFGraph graph, URL url) {
        this.graph = graph;
        this.url = url;
    }

    public List<URL> read() throws IOException {
        try {
            loadCount = 0;
            loadAlpha = 0;
            loadedURLs = new ArrayList<URL>();
            loadedURLs.add(url);

            sourceNode = graph.getNodeById(url.toString());
            if (sourceNode == null) {
                sourceNode = graph.getNodeOrAdd(url.toString());
                reload = false;
            }
            else {
                // we are reloading a file
                // store old nodes+edges for checking deletions
                // while loading, nodes and edges are removed from these lists
                nodesToRemove = sourceNode.getNeighbours(NS.graphl + "definedIn", Node.REVERSE);
                nodesToRemove.remove(sourceNode);
                edgesToRemove = graph.getEdgesWithPropertyValue(NS.graphl + "definedIn", url.toString());
                reload = true;
            }

            if (sourceNode.getNeighbours(NS.graphl + "definedIn", Node.FORWARD).size() == 0) {
                Edge edge = graph.createEdge(sourceNode, sourceNode);
                edge.setSource(NS.graphl + "SYSTEM");
                edge.setType(NS.graphl + "definedIn");
                graph.addElements(null, Collections.singleton(edge));
            }

            InputSource input = new InputSource(url.openConnection().getInputStream());
            input.setSystemId(url.toString());

            RDFParser parser = new RDFParser();
            loading = true;
            try {
                parser.parse(input, this);
            }
            catch (Exception e) {
                e.printStackTrace();
            }
            loading = false;

            if (reload) {
                for (Iterator<Node> nodesI = nodesToRemove.iterator(); nodesI.hasNext();) {
                    Node node = nodesI.next();
                    List<Node> nodeSources = node.getNeighbours(NS.graphl + "definedIn", Node.FORWARD);
                    if (nodeSources.size() > 1) {
                        // keep nodes with multiple sources; remove through the iterator
                        // to avoid a ConcurrentModificationException
                        nodesI.remove();
                    }
                }
                graph.deleteElements(nodesToRemove, edgesToRemove);
            }

            return loadedURLs;
        }
        finally {
            loading = false;
            reload = false;
        }
    }

    // from getNodeOrNew()

    // from createEdge()
    // if (loadingURL != null) {
    //     edge.setSource(loadingURL.toString());
    // }
    // else {
    //     edge.setSource(NS.graphl + "USER");
    // }

    /**
     * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#startModel(java.lang.String)
     */
    public void startModel(String physicalURI) throws SAXException {
        if (GraphlRegistry.DEBUG) System.out.println("RDF: startModel");
    }

    /**
     * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#endModel()
     */
    public void endModel() throws SAXException {
        if (GraphlRegistry.DEBUG) System.out.println("RDF: endModel");
    }

    /**
     * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#statementWithResourceValue(java.lang.String, java.lang.String, java.lang.String)
     */
    public void statementWithResourceValue(String subject, String predicate, String object) throws SAXException {
        // resolve subject and object against the document URL
        try {
            subject = new URL(url, subject).toString();
        }
        catch (MalformedURLException muex) {
            // do nothing
        }
        try {
            object = new URL(url, object).toString();
        }
        catch (MalformedURLException muex) {
            // do nothing
        }

        Node snode;
        Node onode;
        try {
            snode = getNodeOrAdd(subject);
            onode = getNodeOrAdd(object);

            // get / create connection
            List edges = snode.getEdgesFrom();
            RDFEdge edge = null;
            boolean exists = false;

            for (Iterator iter = edges.iterator(); iter.hasNext();) {
                edge = (RDFEdge) iter.next();
                if ((edge.getTo() == onode) && (edge.getType().equals(predicate))) {
                    exists = true;
                    if (reload) edgesToRemove.remove(edge);
                    break;
                }
            }

            if (!exists) {
                edge = new RDFEdge(snode, onode);
                edge.setType(predicate);
                edge.setSource(url.toString());
                graph.addElements(null, Collections.singleton((Edge) edge));
                if (GraphlRegistry.DEBUG) System.out.println("created edge " + subject + ", " + predicate + ", " + object);

                if (predicate.equals("http://www.w3.org/2002/07/owl#imports") || predicate.equals(NS.rdfs + "seeAlso")) {
                //if (predicate.equals("http://www.w3.org/2002/07/owl#imports")) {
                    try {
                        URL importURL = new URL(url, object);
                        System.out.println("importing " + importURL.toString() + " ... ");
                        RDFGraphReader importReader = new RDFGraphReader(graph, importURL);
                        loadedURLs.addAll(importReader.read());
                    }
                    catch (Exception ex) {
                        System.out.println("Error importing " + object + " : " + ex.toString());
                    }
                }
            }
        }
        catch (Exception ex) {
            if (GraphlRegistry.DEBUG) System.out.println("Error while reading triple: " + subject + ", " + predicate + ", " + object);
            //ex.printStackTrace();
        }
    }

    /**
     * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#statementWithLiteralValue(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
     */
    public void statementWithLiteralValue(String subject, String predicate, String object, String language, String datatype) throws SAXException {
        // get absolute URL for subject
        try {
            subject = new URL(url, subject).toString();
        }
        catch (MalformedURLException muex) {
            // do nothing
        }

        Node snode;
        try {
            snode = getNodeOrAdd(subject);
            snode.setProperty(predicate, object);
            if (GraphlRegistry.DEBUG) System.out.println("created property " + subject + ", " + predicate + ", " + object);
        }
        catch (Exception ex) {
            if (GraphlRegistry.DEBUG) System.out.println("Error while reading triple: " + subject + ", " + predicate + ", " + object);
        }
    }

    protected Node getNodeOrAdd(String uri) {
        Node node = graph.getNodeById(uri);
        if (node == null) {
            node = graph.getNodeOrAdd(uri);
            // the node is new, so assign default coordinates
            float r = 20 + loadCount;
            loadAlpha += 30 / r;
            node.setCenter(r * Math.sin(loadAlpha), r * Math.cos(loadAlpha));
            loadCount++;
        }
        else if (reload) {
            if (!ParseUtils.guessName(uri).startsWith("genid")) {
                // if the node has a proper id and it's already in the graph, it has existed before
                nodesToRemove.remove(node);
            }
            else {
                // use heuristics to select similar node?
            }
        }

        boolean found = false;
        for (Iterator i = node.getNeighbours(NS.graphl + "definedIn", Node.FORWARD).iterator(); i.hasNext();) {
            Node source = (Node) i.next();
            if (source.equals(sourceNode)) {
                found = true;
                break;
            }
        }
        if (!found) {
            Edge edge = graph.createEdge(node, sourceNode);
            edge.setSource(NS.graphl + "SYSTEM");
            edge.setType(NS.graphl + "definedIn");
            graph.addElements(null, Collections.singleton(edge));
        }
        return node;
    }

    /**
     * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#logicalURI(java.lang.String)
     */
    public void logicalURI(String logicalURI) throws SAXException {
        if (GraphlRegistry.DEBUG) System.out.println("RDF: logicalURI: " + logicalURI);
    }

    /**
     * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#includeModel(java.lang.String, java.lang.String)
     */
    public void includeModel(String logicalURI, String physicalURI) throws SAXException {
        if (GraphlRegistry.DEBUG) System.out.println("RDF: includeModel: " + logicalURI + ", " + physicalURI);
    }

    /**
     * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#addModelAttribte(java.lang.String, java.lang.String)
     */
    public void addModelAttribte(String key, String value) throws SAXException {
        if (GraphlRegistry.DEBUG) System.out.println("RDF: addModelAttribte" + key + ", " + value);
    }
}
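For orientation, a minimal driver for the new reader class might look like the sketch below. It is not part of this commit; it assumes RDFGraph offers a usable no-argument constructor and uses a placeholder document URL.

import java.net.URL;
import java.util.List;

import org.mediavirus.graphl.graph.rdf.RDFGraph;
import org.mediavirus.graphl.graph.rdf.RDFGraphReader;

public class ReaderDemo {
    public static void main(String[] args) throws Exception {
        RDFGraph graph = new RDFGraph();                  // assumes a no-arg constructor
        URL url = new URL("http://example.org/data.rdf"); // placeholder document URL

        // read() fills the graph and follows owl:imports / rdfs:seeAlso links,
        // returning every URL that was actually loaded
        List<URL> loaded = new RDFGraphReader(graph, url).read();
        System.out.println("loaded " + loaded.size() + " document(s)");
    }
}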
Index: RDFNode.java
===================================================================
RCS file: /cvsroot/graphl/graphl/src/org/mediavirus/graphl/graph/rdf/RDFNode.java,v
retrieving revision 1.12
retrieving revision 1.13
diff -C2 -d -r1.12 -r1.13
*** RDFNode.java	18 Dec 2005 11:11:41 -0000	1.12
--- RDFNode.java	8 Jun 2006 13:14:59 -0000	1.13
***************
*** 85,89 ****
          Node typeNode;
          try {
!             typeNode = graph.getNodeOrNew(type);
              RDFEdge typeEdge = new RDFEdge(this, typeNode);
              typeEdge.setType("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
--- 85,89 ----
          Node typeNode;
          try {
!             typeNode = graph.getNodeOrAdd(type);
              RDFEdge typeEdge = new RDFEdge(this, typeNode);
              typeEdge.setType("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");

Index: RDFGraph.java
===================================================================
RCS file: /cvsroot/graphl/graphl/src/org/mediavirus/graphl/graph/rdf/RDFGraph.java,v
retrieving revision 1.14
retrieving revision 1.15
diff -C2 -d -r1.14 -r1.15
*** RDFGraph.java	18 Dec 2005 11:11:41 -0000	1.14
--- RDFGraph.java	8 Jun 2006 13:14:59 -0000	1.15
***************
*** 5,10 ****
  package org.mediavirus.graphl.graph.rdf;
  
- import java.util.Iterator;
- 
  import java.io.File;
  import java.io.IOException;
--- 5,8 ----
***************
*** 13,20 ****
  import java.net.MalformedURLException;
  import java.net.URL;
- import java.util.ArrayList;
  import java.util.Collection;
  import java.util.Collections;
  import java.util.Hashtable;
  import java.util.List;
  import java.util.Map;
--- 11,18 ----
  import java.net.MalformedURLException;
  import java.net.URL;
  import java.util.Collection;
  import java.util.Collections;
  import java.util.Hashtable;
+ import java.util.Iterator;
  import java.util.List;
  import java.util.Map;
***************
*** 25,34 ****
  import org.mediavirus.graphl.graph.Graph;
  import org.mediavirus.graphl.graph.Node;
- import org.mediavirus.graphl.vocabulary.NS;
- import org.xml.sax.InputSource;
- import org.xml.sax.SAXException;
- import edu.unika.aifb.rdf.api.syntax.RDFConsumer;
- import edu.unika.aifb.rdf.api.syntax.RDFParser;
  import edu.unika.aifb.rdf.api.syntax.RDFWriter;
--- 23,27 ----
***************
*** 37,43 ****
   * @author Flo Ledermann <led...@im...>
   */
! public class RDFGraph extends DefaultGraph implements RDFConsumer {
! 
!     public static boolean DEBUG = false;
  
      boolean dirty = false;
--- 30,34 ----
   * @author Flo Ledermann <led...@im...>
   */
! public class RDFGraph extends DefaultGraph {
  
      boolean dirty = false;
***************
*** 45,149 ****
      Hashtable<String, Node> allNodes = new Hashtable<String, Node>();
  
!     URL loadingURL = null;
! 
!     List<URL> loadingURLs = null;
! 
!     Node sourceNode = null;
! 
!     private int loadCount = 0;
! 
!     private float loadAlpha = 0.1f;
! 
!     public RDFNode getNodeById(String id) {
! 
!         return (RDFNode) allNodes.get(id);
!     }
! 
!     public static void writeToRDF(Graph graph, OutputStream out, String baseURL) throws IOException {
! 
!         RDFWriter writer = new RDFWriter();
! 
!         Map<String, String> namespaceMap = GraphlRegistry.instance().getVocabularyRegistry().getNamespaces();
!         for (Iterator prefixes = namespaceMap.keySet().iterator(); prefixes.hasNext();) {
!             String prefix = (String)prefixes.next();
!             writer.addNamespacePrefix(prefix, namespaceMap.get(prefix));
!         }
!         // TODO (2) use vocabularies from config for prefix resolution
!         // writer.addNamespacePrefix("graphl", NS.graphl);
!         // writer.addNamespacePrefix("foaf", "http://xmlns.com/foaf/0.1/");
!         // writer.addNamespacePrefix("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#");
!         // writer.addNamespacePrefix("rdfs", "http://www.w3.org/2000/01/rdf-schema#");
!         // writer.addNamespacePrefix("owl", "http://www.w3.org/2002/07/owl#");
!         // writer.addNamespacePrefix("map", "http://fabl.net/vocabularies/geography/map/1.1/");
!         // writer.addNamespacePrefix("geo", "http://www.w3.org/2003/01/geo/wgs84_pos#");
!         // writer.addNamespacePrefix("dc", "http://purl.org/dc/elements/1.1/");
!         // writer.addNamespacePrefix("foo", "http://www.mediavirus.org/foo#");
!         // writer.addNamespacePrefix("vs", "http://www.w3.org/2003/06/sw-vocab-status/ns#");
!         // writer.addNamespacePrefix("wot", "http://xmlns.com/wot/0.1/");
! 
!         writer.prepareNamespaceCollection();
! 
!         for (Iterator namespaces = namespaceMap.values().iterator(); namespaces.hasNext();) {
!             String ns = (String)namespaces.next();
!             writer.collectNamespace(ns);
!         }
! 
!         // writer.collectNamespace(NS.graphl);
!         // writer.collectNamespace("http://xmlns.com/foaf/0.1/");
!         // writer.collectNamespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#");
!         // writer.collectNamespace("http://www.w3.org/2000/01/rdf-schema#");
!         // writer.collectNamespace("http://www.w3.org/2002/07/owl#");
!         // writer.collectNamespace("http://fabl.net/vocabularies/geography/map/1.1/");
!         // writer.collectNamespace("http://www.w3.org/2003/01/geo/wgs84_pos#");
!         // writer.collectNamespace("http://purl.org/dc/elements/1.1/");
!         // writer.collectNamespace("http://www.mediavirus.org/foo#");
!         // writer.collectNamespace("http://www.w3.org/2003/06/sw-vocab-status/ns#");
!         // writer.collectNamespace("http://xmlns.com/wot/0.1/");
! 
!         OutputStreamWriter owriter = new OutputStreamWriter(out);
!         writer.startSerialization(owriter, "", "", "UTF-8");
!         writer.startRDFContents();
! 
!         List allNodes = graph.getNodes();
!         List allEdges = graph.getEdges();
! 
!         for (Iterator iter = allNodes.iterator(); iter.hasNext();) {
!             Node node = (Node) iter.next();
! 
!             String id = node.getId();
!             if ((baseURL != null) && (id.startsWith(baseURL)) && (id.lastIndexOf('#') > -1)) {
!                 id = id.substring(id.lastIndexOf('#'));
!             }
! 
!             for (Iterator attrs = node.getProperties().keySet().iterator(); attrs.hasNext();) {
!                 String attr = (String) attrs.next();
!                 writer.writeStatement(id, attr, node.getProperty(attr), null, null, true);
!             }
!             List nodeEdges = node.getEdgesFrom();
!             for (Iterator edgeIter = nodeEdges.iterator(); edgeIter.hasNext();) {
!                 Edge edge = (Edge) edgeIter.next();
!                 // TODO (3) this would be a reason to have a FilteredNode class that returns only filtered edges...
!                 if (allEdges.contains(edge)) {
!                     String toId = edge.getTo().getId();
!                     if ((baseURL != null) && (toId.startsWith(baseURL)) && (toId.lastIndexOf('#') > -1)) {
!                         toId = toId.substring(toId.lastIndexOf('#'));
!                     }
!                     writer.writeStatement(id, edge.getType(), toId, null, null, false);
!                 }
!             }
!         }
! 
!         writer.finishRDFContents();
!         writer.cleanUp();
! 
!     }
! 
!     public void writeToRDF(OutputStream out, String baseURL) throws IOException {
! 
!         writeToRDF(this, out, baseURL);
!     }
! 
!     public synchronized Iterator<URL> readFromFile(File file) throws IOException {
! 
      try {
          URL url = new URL("file:///" + file.getAbsolutePath());
--- 36,40 ----
      Hashtable<String, Node> allNodes = new Hashtable<String, Node>();
  
!     public synchronized List<URL> readFromFile(File file) throws IOException {
      try {
          URL url = new URL("file:///" + file.getAbsolutePath());
***************
*** 156,173 ****
      }
  
!     public synchronized Iterator<URL> readFromURL(URL url) throws IOException{
! 
!         try {
!             loadingURLs = new ArrayList<URL>();
!             importFromURL(url);
! 
!             Iterator<URL> retVal = loadingURLs.iterator();
! 
!             return retVal;
!         }
!         finally {
!             loadingURLs = null;
!         }
!     }
--- 47,52 ----
      }
  
!     public synchronized List<URL> readFromURL(URL url) throws IOException {
!         return importFromURL(url);
      }
***************
*** 179,342 ****
       * @param url The URL to load.
       */
!     protected void importFromURL(URL url) throws IOException{
          try {
!             loadingURL = url;
!             sourceNode = getNodeOrNew(loadingURL.toString());
! 
!             if (sourceNode.getNeighbours(NS.graphl + "definedIn", true).size() == 0) {
!                 Edge edge = createEdge(sourceNode, sourceNode);
!                 edge.setSource(NS.graphl + "SYSTEM");
!                 edge.setType(NS.graphl + "definedIn");
!                 addElements(null, Collections.singleton(edge));
!             }
! 
!             InputSource input;
! 
!             input = new InputSource(url.openConnection().getInputStream());
!             input.setSystemId(url.toString());
!             readGraph(input);
!             loadingURLs.add(url);
          }
          finally {
!             loadingURL = null;
!             sourceNode = null;
          }
-     }
- 
-     public synchronized void readGraph(InputSource input) {
-         RDFParser parser = new RDFParser();
-         loading = true;
-         try {
-             parser.parse(input, this);
-         }
-         catch (Exception e) {
-             e.printStackTrace();
-         }
-         loading = false;
-         resetDirty();
-         fireGraphContentsChanged();
      }
  
!     /**
!      * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#startModel(java.lang.String)
!      */
!     public void startModel(String physicalURI) throws SAXException {
! 
!         if (DEBUG) System.out.println("RDF: startModel");
!     }
! 
!     /**
!      * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#endModel()
!      */
!     public void endModel() throws SAXException {
! 
!         if (DEBUG) System.out.println("RDF: endModel");
!     }
! 
!     /**
!      * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#statementWithResourceValue(java.lang.String, java.lang.String, java.lang.String)
!      */
!     public void statementWithResourceValue(String subject, String predicate, String object) throws SAXException {
! 
!         //if (predicate.equals(NS.graphl + "connectedTo")) {
!         // get / create node with label subject
! 
!         if (loadingURL != null) {
!             try {
!                 subject = new URL(loadingURL, subject).toString();
!             }
!             catch (MalformedURLException muex) {
!                 // do nothing
!             }
!             try {
!                 object = new URL(loadingURL, object).toString();
!             }
!             catch (MalformedURLException muex) {
!                 // do nothing
!             }
!         }
! 
!         Node snode;
!         Node onode;
!         try {
!             snode = getNodeOrNew(subject);
!             onode = getNodeOrNew(object);
! 
!             // get / create connection
!             List edges = snode.getEdgesFrom();
!             RDFEdge edge = null;
!             boolean exists = false;
! 
!             for (Iterator iter = edges.iterator(); iter.hasNext();) {
!                 edge = (RDFEdge) iter.next();
!                 if ((edge.getTo() == onode) && (edge.getType().equals(predicate))) {
!                     exists = true;
!                     break;
!                 }
!             }
! 
!             if (!exists) {
!                 edge = new RDFEdge(snode, onode);
!                 edge.setType(predicate);
!                 edge.setSource(loadingURL.toString());
!                 addElements(null, Collections.singleton((Edge)edge));
!                 if (DEBUG) System.out.println("created edge " + subject + ", " + predicate + ", " + object);
! 
!                 if (predicate.equals("http://www.w3.org/2002/07/owl#imports") || predicate.equals(NS.rdfs + "seeAlso")) {
!                 //if (predicate.equals("http://www.w3.org/2002/07/owl#imports")) {
!                     URL oldBase = loadingURL;
!                     Node oldSource = sourceNode;
!                     try {
!                         URL importURL = new URL(loadingURL, object);
!                         System.out.println("importing " + importURL.toString() + " ... ");
!                         importFromURL(importURL);
!                         // restore original base url
!                     }
!                     catch (Exception ex) {
!                         System.out.println("Error importing " + object);
!                     }
!                     finally {
!                         loadingURL = oldBase;
!                         sourceNode = oldSource;
!                     }
!                 }
!             }
!         }
!         catch (Exception ex) {
!             if (DEBUG) System.out.println("Error while reading triple: " + subject + ", " + predicate + ", " + object);
!             //ex.printStackTrace();
!         }
!     }
! 
!     public Node getNodeOrNew(String uri) {
! 
          RDFNode node = getNodeById(uri);
          if (node == null) {
              node = new RDFNode(this, uri);
!             float r = 20 + loadCount;
!             loadAlpha += 30 / r;
!             node.setCenter(r * Math.sin(loadAlpha), r * Math.cos(loadAlpha));
!             loadCount++;
!             addElements(Collections.singleton((Node)node), null);
!             if (DEBUG) System.out.println("created node " + uri);
!         }
!         if (sourceNode != null) {
!             boolean found = false;
!             for (Iterator i = node.getNeighbours(NS.graphl + "definedIn", true).iterator(); i.hasNext();) {
!                 Node source = (Node) i.next();
!                 if (source.equals(sourceNode)) {
!                     found = true;
!                     break;
!                 }
!             }
!             if (!found) {
!                 Edge edge = createEdge(node, sourceNode);
!                 edge.setSource(NS.graphl + "SYSTEM");
!                 edge.setType(NS.graphl + "definedIn");
!                 addElements(null, Collections.singleton(edge));
!             }
          }
          return node;
--- 58,89 ----
       * @param url The URL to load.
       */
!     protected synchronized List<URL> importFromURL(URL url) throws IOException {
+         // we have to set the loading flag to prevent elementsAdded notifications during loading
+         loading = true;
+         boolean oldDirty = isDirty();
          try {
!             RDFGraphReader reader = new RDFGraphReader(this, url);
!             List<URL> retVal = reader.read();
!             fireGraphContentsChanged();
!             return retVal;
          }
          finally {
!             loading = false;
!             setDirty(oldDirty);
          }
      }
  
!     public RDFNode getNodeById(String id) {
!         return (RDFNode) allNodes.get(id);
      }
  
!     public Node getNodeOrAdd(String uri) {
          RDFNode node = getNodeById(uri);
          if (node == null) {
              node = new RDFNode(this, uri);
!             addElements(Collections.singleton((Node) node), null);
! 
+             if (GraphlRegistry.DEBUG) System.out.println("created node " + uri);
          }
          return node;
***************
*** 353,362 ****
          RDFEdge edge = new RDFEdge(from, to);
  
-         if (loadingURL != null) {
-             edge.setSource(loadingURL.toString());
-         }
-         else {
-             edge.setSource(NS.graphl + "USER");
-         }
          //addElements(null, Collections.singleton(edge));
          return edge;
--- 100,103 ----
***************
*** 364,424 ****
  
      /**
-      * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#statementWithLiteralValue(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
-      */
-     public void statementWithLiteralValue(String subject, String predicate, String object, String language, String datatype) throws SAXException {
- 
-         // get absolute URL for subject
-         if (loadingURL != null) {
-             try {
-                 subject = new URL(loadingURL, subject).toString();
-             }
-             catch (MalformedURLException muex) {
-                 // do nothing
-             }
-         }
- 
-         Node snode;
-         try {
-             snode = getNodeOrNew(subject);
-             snode.setProperty(predicate, object);
-             if (DEBUG) System.out.println("created property " + subject + ", " + predicate + ", " + object);
-         }
-         catch (Exception ex) {
-             if (DEBUG) System.out.println("Error while reading triple: " + subject + ", " + predicate + ", " + object);
-         }
-     }
- 
-     /**
-      * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#logicalURI(java.lang.String)
-      */
-     public void logicalURI(String logicalURI) throws SAXException {
- 
-         if (DEBUG)
-             System.out.println("RDF: logicalURI: " + logicalURI);
-     }
- 
-     /**
-      * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#includeModel(java.lang.String, java.lang.String)
-      */
-     public void includeModel(String logicalURI, String physicalURI) throws SAXException {
- 
-         if (DEBUG)
-             System.out.println("RDF: includeModel: " + logicalURI + ", " + physicalURI);
-     }
- 
-     /**
-      * @see edu.unika.aifb.rdf.api.syntax.RDFConsumer#addModelAttribte(java.lang.String, java.lang.String)
-      */
-     public void addModelAttribte(String key, String value) throws SAXException {
- 
-         if (DEBUG)
-             System.out.println("RDF: addModelAttribte" + key + ", " + value);
-     }
- 
-     /**
       * @return Returns the dirty flag.
       */
      public boolean isDirty() {
- 
          return dirty;
      }
--- 105,111 ----
***************
*** 428,445 ****
       */
      public void setDirty(boolean dirty) {
- 
          this.dirty = dirty;
      }
  
-     public void setDirty() {
- 
-         setDirty(true);
-     }
- 
-     public void resetDirty() {
- 
-         setDirty(false);
-     }
- 
      /*
       * Overrides @see de.fzi.wim.guibase.graphview.graph.DefaultGraph#addElements(java.util.Collection, java.util.Collection)
--- 115,121 ----
***************
*** 447,451 ****
      public synchronized void addElements(Collection<Node> nodes, Collection<Edge> edges) {
  
!         setDirty();
  
          super.addElements(nodes, edges);
--- 123,127 ----
      public synchronized void addElements(Collection<Node> nodes, Collection<Edge> edges) {
  
!         setDirty(true);
  
          super.addElements(nodes, edges);
***************
*** 463,470 ****
      public synchronized void clear() {
  
!         setDirty();
  
          allNodes.clear();
          super.clear();
-         loadCount = 0;
      }
--- 139,145 ----
      public synchronized void clear() {
  
!         setDirty(true);
  
          allNodes.clear();
          super.clear();
      }
***************
*** 474,479 ****
      public synchronized void deleteElements(Collection<Node> nodes, Collection<Edge> edges) {
  
!         setDirty();
!         super.deleteElements(nodes, edges);
  
          if (nodes != null) {
--- 149,153 ----
      public synchronized void deleteElements(Collection<Node> nodes, Collection<Edge> edges) {
  
!         setDirty(true);
  
          if (nodes != null) {
***************
*** 483,486 ****
--- 157,247 ----
              }
          }
+         // TODO after this call, nodes is empty??? why???
+         super.deleteElements(nodes, edges);
+ 
+     }
+ 
+     public static void writeToRDF(Graph graph, OutputStream out, String baseURL) throws IOException {
+ 
+         RDFWriter writer = new RDFWriter();
+ 
+         Map<String, String> namespaceMap = GraphlRegistry.instance().getVocabularyRegistry().getNamespaces();
+         for (Iterator prefixes = namespaceMap.keySet().iterator(); prefixes.hasNext();) {
+             String prefix = (String) prefixes.next();
+             writer.addNamespacePrefix(prefix, namespaceMap.get(prefix));
+         }
+         // TODO (2) use vocabularies from config for prefix resolution
+         // writer.addNamespacePrefix("graphl", NS.graphl);
+         // writer.addNamespacePrefix("foaf", "http://xmlns.com/foaf/0.1/");
+         // writer.addNamespacePrefix("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#");
+         // writer.addNamespacePrefix("rdfs", "http://www.w3.org/2000/01/rdf-schema#");
+         // writer.addNamespacePrefix("owl", "http://www.w3.org/2002/07/owl#");
+         // writer.addNamespacePrefix("map", "http://fabl.net/vocabularies/geography/map/1.1/");
+         // writer.addNamespacePrefix("geo", "http://www.w3.org/2003/01/geo/wgs84_pos#");
+         // writer.addNamespacePrefix("dc", "http://purl.org/dc/elements/1.1/");
+         // writer.addNamespacePrefix("foo", "http://www.mediavirus.org/foo#");
+         writer.addNamespacePrefix("vs", "http://www.w3.org/2003/06/sw-vocab-status/ns#");
+         // writer.addNamespacePrefix("wot", "http://xmlns.com/wot/0.1/");
+ 
+         writer.prepareNamespaceCollection();
+ 
+         for (Iterator namespaces = namespaceMap.values().iterator(); namespaces.hasNext();) {
+             String ns = (String) namespaces.next();
+             writer.collectNamespace(ns);
+         }
+ 
+         // writer.collectNamespace(NS.graphl);
+         // writer.collectNamespace("http://xmlns.com/foaf/0.1/");
+         // writer.collectNamespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#");
+         // writer.collectNamespace("http://www.w3.org/2000/01/rdf-schema#");
+         // writer.collectNamespace("http://www.w3.org/2002/07/owl#");
+         // writer.collectNamespace("http://fabl.net/vocabularies/geography/map/1.1/");
+         // writer.collectNamespace("http://www.w3.org/2003/01/geo/wgs84_pos#");
+         // writer.collectNamespace("http://purl.org/dc/elements/1.1/");
+         // writer.collectNamespace("http://www.mediavirus.org/foo#");
+         writer.collectNamespace("http://www.w3.org/2003/06/sw-vocab-status/ns#");
+         // writer.collectNamespace("http://xmlns.com/wot/0.1/");
+ 
+         OutputStreamWriter owriter = new OutputStreamWriter(out);
+         writer.startSerialization(owriter, "", "", "UTF-8");
+         writer.startRDFContents();
+ 
+         List allNodes = graph.getNodes();
+         List allEdges = graph.getEdges();
+ 
+         for (Iterator iter = allNodes.iterator(); iter.hasNext();) {
+             Node node = (Node) iter.next();
+ 
+             String id = node.getId();
+             if ((baseURL != null) && (id.startsWith(baseURL)) && (id.lastIndexOf('#') > -1)) {
+                 id = id.substring(id.lastIndexOf('#'));
+             }
+ 
+             for (Iterator attrs = node.getProperties().keySet().iterator(); attrs.hasNext();) {
+                 String attr = (String) attrs.next();
+                 System.out.println("WRITING: " + id + ";" + attr + ";" + node.getProperty(attr));
+                 writer.writeStatement(id, attr, node.getProperty(attr), null, null, true);
+             }
+             List nodeEdges = node.getEdgesFrom();
+             for (Iterator edgeIter = nodeEdges.iterator(); edgeIter.hasNext();) {
+                 Edge edge = (Edge) edgeIter.next();
+                 // TODO (3) this would be a reason to have a FilteredNode class that returns only filtered edges...
+                 if (allEdges.contains(edge)) {
+                     String toId = edge.getTo().getId();
+                     if ((baseURL != null) && (toId.startsWith(baseURL)) && (toId.lastIndexOf('#') > -1)) {
+                         toId = toId.substring(toId.lastIndexOf('#'));
+                     }
+                     writer.writeStatement(id, edge.getType(), toId, null, null, false);
+                 }
+             }
+         }
+ 
+         writer.finishRDFContents();
+         writer.cleanUp();
+ 
+     }
+ 
+     public void writeToRDF(OutputStream out, String baseURL) throws IOException {
+         writeToRDF(this, out, baseURL);
+     }
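For completeness, a hypothetical round trip through the reworked graph-level API could look like the sketch below. The input URL and output file name are placeholders, and a no-argument RDFGraph constructor is again assumed.

import java.io.FileOutputStream;
import java.net.URL;
import java.util.List;

import org.mediavirus.graphl.graph.rdf.RDFGraph;

public class RoundTripDemo {
    public static void main(String[] args) throws Exception {
        RDFGraph graph = new RDFGraph(); // assumes a no-arg constructor

        // readFromURL() now returns the list of loaded URLs directly
        // (it used to return an Iterator<URL>)
        List<URL> loaded = graph.readFromURL(new URL("http://example.org/data.rdf"));
        System.out.println("loaded: " + loaded);

        // serialize the whole graph; IDs under the base URL are written as fragment IDs
        FileOutputStream out = new FileOutputStream("out.rdf");
        graph.writeToRDF(out, "http://example.org/data.rdf");
        out.close();
    }
}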