From: <tho...@us...> - 2014-02-23 23:25:02
Revision: 7877
http://sourceforge.net/p/bigdata/code/7877
Author: thompsonbry
Date: 2014-02-23 23:24:57 +0000 (Sun, 23 Feb 2014)
Log Message:
-----------
Bug fix for #816 (static analysis of SERVICE ignores variables declared in the SERVICE's graph pattern).
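The intended behaviour, sketched against the same parser and StaticAnalysis API that the new unit test below (test_static_analysis05) exercises; the query string and variable names here are illustrative only, not part of the commit:

    // Sketch only: mirrors the new test case further down. Assumes the test
    // harness's triple store ("store") and baseURI, as in TestStaticAnalysis.
    final String queryStr =
        "SELECT * WHERE { SERVICE ?uri { ?x <http://xmlns.com/foaf/0.1/name> ?z } }";

    final QueryRoot queryRoot = new Bigdata2ASTSPARQLParser(store)
            .parseQuery2(queryStr, baseURI).getOriginalAST();

    final StaticAnalysis sa = new StaticAnalysis(queryRoot);

    // Spanned variables now include ?uri, ?x and ?z. Before this fix the SERVICE's
    // graph pattern was skipped, so a wildcard projection silently dropped ?x and ?z.
    final Set<IVariable<?>> spanned = sa.getSpannedVariables(
            queryRoot.getWhereClause(), new LinkedHashSet<IVariable<?>>());

    // Definitely produced bindings still exclude ?uri: a variable SERVICE reference
    // must become bound elsewhere in the query before the SERVICE call can run.
    final Set<IVariable<?>> definite = sa.getDefinitelyProducedBindings(
            queryRoot.getWhereClause(), new LinkedHashSet<IVariable<?>>(), true/* recursive */);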
Modified Paths:
--------------
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysisBase.java
branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestStaticAnalysis.java
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java 2014-02-23 12:15:58 UTC (rev 7876)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java 2014-02-23 23:24:57 UTC (rev 7877)
@@ -1547,7 +1547,11 @@
/**
* Report "MUST" bound bindings projected by the SERVICE. This involves
* checking the graph pattern reported by
- * {@link ServiceNode#getGraphPattern()} .
+ * {@link ServiceNode#getGraphPattern()}.
+ * <p>
+ * Note: If the SERVICE URI is a variable, then it can only become bound
+ * through some other operation. If the SERVICE variable never becomes
+ * bound, then the SERVICE call cannot run.
*/
// MUST : ServiceNode
public Set<IVariable<?>> getDefinitelyProducedBindings(
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysisBase.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysisBase.java 2014-02-23 12:15:58 UTC (rev 7876)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysisBase.java 2014-02-23 23:24:57 UTC (rev 7877)
@@ -38,8 +38,8 @@
import com.bigdata.bop.IConstant;
import com.bigdata.bop.IVariable;
import com.bigdata.rdf.sparql.ast.eval.IEvaluationContext;
+import com.bigdata.rdf.sparql.ast.service.ServiceNode;
import com.bigdata.rdf.sparql.ast.ssets.ISolutionSetManager;
-import com.bigdata.rdf.store.ITripleStore;
/**
* Base class for static analysis.
@@ -171,7 +171,26 @@
// do not recurse
return varSet;
-
+
+ } else if (op instanceof ServiceNode) {
+
+ // @see http://trac.bigdata.com/ticket/816
+ final ServiceNode serviceNode = (ServiceNode) op;
+
+ // Look for the SERVICE URI, it might be a variable as well.
+ final TermNode uriRef = serviceNode.getServiceRef();
+
+ if (uriRef instanceof VarNode) {
+
+ varSet.add(((VarNode) uriRef).getValueExpression());
+
+ }
+
+ // pick up anything in the group graph pattern.
+ getSpannedVariables(serviceNode.getGraphPattern(), filters, varSet);
+
+ // fall through - look for attached filters.
+
} else if (op instanceof FilterNode && !filters) {
// DO NOT RECURSE INTO THE FILTER!
Modified: branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestStaticAnalysis.java
===================================================================
--- branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestStaticAnalysis.java 2014-02-23 12:15:58 UTC (rev 7876)
+++ branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestStaticAnalysis.java 2014-02-23 23:24:57 UTC (rev 7877)
@@ -270,6 +270,70 @@
}
/**
+ * Unit test of static analysis for a SERVICE call.
+ *
+ * @see <a href="http://trac.bigdata.com/ticket/816" > Wildcard projection
+ * ignores variables inside a SERVICE call </a>
+ */
+ public void test_static_analysis05()
+ throws MalformedQueryException {
+
+ final String queryStr = "" +
+ "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \n"+
+ "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> \n"+
+ "PREFIX foaf: <http://xmlns.com/foaf/0.1/> \n"+
+ "select ?x (12 as ?y)\n" +
+ " where {\n" +
+ " service ?uri {\n" +
+ " ?x rdf:type foaf:Person .\n" +
+ " ?x rdfs:label ?z .\n" +
+ " }\n" +
+ "}";
+
+ final QueryRoot queryRoot = new Bigdata2ASTSPARQLParser(store)
+ .parseQuery2(queryStr, baseURI).getOriginalAST();
+
+ final StaticAnalysis sa = new StaticAnalysis(queryRoot);
+
+ final Set<IVariable<?>> expectedProjected = new LinkedHashSet<IVariable<?>>();
+
+ expectedProjected.add(Var.var("x"));
+ expectedProjected.add(Var.var("y"));
+
+ assertEquals(expectedProjected, sa.getDefinitelyProducedBindings(queryRoot));
+
+ // The spanned variables includes the SERVICE URI (if it is a variable).
+ {
+
+ final Set<IVariable<?>> expectedWhereClause = new LinkedHashSet<IVariable<?>>();
+
+ expectedWhereClause.add(Var.var("uri"));
+ expectedWhereClause.add(Var.var("x"));
+ expectedWhereClause.add(Var.var("z"));
+
+ assertEquals(expectedWhereClause, sa.getSpannedVariables(
+ queryRoot.getWhereClause(),
+ new LinkedHashSet<IVariable<?>>()));
+ }
+
+ // The definitely bound variables do NOT include the SERVICE URI. When
+ // that is a variable it needs to become bound through other means.
+ {
+
+ final Set<IVariable<?>> expectedWhereClause = new LinkedHashSet<IVariable<?>>();
+
+ expectedWhereClause.add(Var.var("x"));
+ expectedWhereClause.add(Var.var("z"));
+
+ assertEquals(expectedWhereClause, sa.getDefinitelyProducedBindings(
+ queryRoot.getWhereClause(),
+ new LinkedHashSet<IVariable<?>>(), true/* recursive */));
+
+ }
+
+ }
+
+ /**
* Unit test for computing the join variables for a named subquery based on
* the analysis of the bindings which MUST be produced by the subquery and
* those which MUST be bound on entry into the group in which the subquery
From: <mrp...@us...> - 2014-03-12 17:48:31
Revision: 7943
http://sourceforge.net/p/bigdata/code/7943
Author: mrpersonick
Date: 2014-03-12 17:48:28 +0000 (Wed, 12 Mar 2014)
Log Message:
-----------
Extended the SPARQL Results JSON writer to handle RDR. Tickets 849 and 848.
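A rough usage sketch for the new writer; only BigdataSPARQLResultsJSONWriter and its handler methods come from this commit, while the enclosing method and the TupleQueryResult are assumptions:

    import java.io.OutputStream;
    import org.openrdf.query.TupleQueryResult;
    import com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriter;

    // Sketch only: serializes a SELECT result as SPARQL Results JSON. Statement
    // identifiers (RDR sids) are emitted as "type": "sid" bindings whose value is a
    // nested object with "sid-s"/"sid-p"/"sid-o" (and "sid-c" when a context is
    // present), per writeValue() in the diff below.
    void writeJson(final TupleQueryResult result, final OutputStream out) throws Exception {
        final BigdataSPARQLResultsJSONWriter writer = new BigdataSPARQLResultsJSONWriter(out);
        writer.startQueryResult(result.getBindingNames());
        while (result.hasNext()) {
            writer.handleSolution(result.next());
        }
        writer.endQueryResult();
    }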
Modified Paths:
--------------
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java
branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFParserFactory
branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory
Added Paths:
-----------
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java
branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java 2014-03-12 17:47:35 UTC (rev 7942)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java 2014-03-12 17:48:28 UTC (rev 7943)
@@ -32,14 +32,14 @@
import java.util.ServiceLoader;
import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.resultio.TupleQueryResultParserRegistry;
+import org.openrdf.query.resultio.TupleQueryResultWriterRegistry;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParserRegistry;
-import org.openrdf.rio.RDFWriterRegistry;
import com.bigdata.rdf.model.StatementEnum;
+import com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactory;
import com.bigdata.rdf.rio.ntriples.BigdataNTriplesParserFactory;
-import com.bigdata.rdf.rio.rdfxml.BigdataRDFXMLParserFactory;
-import com.bigdata.rdf.rio.rdfxml.BigdataRDFXMLWriterFactory;
import com.bigdata.rdf.rio.turtle.BigdataTurtleParserFactory;
/**
@@ -119,7 +119,17 @@
r.add(new BigdataTurtleParserFactory());
}
+
+ {
+
+ final TupleQueryResultWriterRegistry r = TupleQueryResultWriterRegistry.getInstance();
+ // add our custom RDR-enabled JSON writer
+ r.add(new BigdataSPARQLResultsJSONWriterFactory());
+
+ }
+
+
// // Ditto, but for the writer.
// {
// final RDFWriterRegistry r = RDFWriterRegistry.getInstance();
Added: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java (rev 0)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java 2014-03-12 17:48:28 UTC (rev 7943)
@@ -0,0 +1,370 @@
+/*
+ * Copyright Aduna (http://www.aduna-software.com/) (c) 1997-2007.
+ *
+ * Licensed under the Aduna BSD-style license.
+ */
+package com.bigdata.rdf.rio.json;
+
+import info.aduna.io.IndentingWriter;
+import info.aduna.text.StringUtil;
+
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.Charset;
+import java.util.Iterator;
+import java.util.List;
+
+import org.openrdf.model.BNode;
+import org.openrdf.model.Literal;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.query.Binding;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.resultio.TupleQueryResultFormat;
+import org.openrdf.query.resultio.TupleQueryResultWriter;
+
+import com.bigdata.rdf.model.BigdataBNode;
+import com.bigdata.rdf.model.BigdataStatement;
+
+/**
+ * A TupleQueryResultWriter that writes query results in the <a
+ * href="http://www.w3.org/TR/rdf-sparql-json-res/">SPARQL Query Results JSON
+ * Format</a>.
+ */
+public class BigdataSPARQLResultsJSONWriter implements TupleQueryResultWriter {
+
+ /*-----------*
+ * Variables *
+ *-----------*/
+
+ private IndentingWriter writer;
+
+ private boolean firstTupleWritten;
+
+ /*--------------*
+ * Constructors *
+ *--------------*/
+
+ public BigdataSPARQLResultsJSONWriter(OutputStream out) {
+ Writer w = new OutputStreamWriter(out, Charset.forName("UTF-8"));
+ w = new BufferedWriter(w, 1024);
+ writer = new IndentingWriter(w);
+ }
+
+ /*---------*
+ * Methods *
+ *---------*/
+
+ /**
+ * This is the only method that is different from the OpenRDF version.
+ * I could not subclass their implementation because the IndentingWriter
+ * is private.
+ */
+ private void writeValue(Value value)
+ throws IOException, TupleQueryResultHandlerException
+ {
+ writer.write("{ ");
+
+ if (value instanceof URI) {
+ writeKeyValue("type", "uri");
+ writer.write(", ");
+ writeKeyValue("value", ((URI)value).toString());
+ }
+ else if (value instanceof BigdataBNode &&
+ ((BigdataBNode) value).isStatementIdentifier()) {
+
+// "bindings": [
+// {
+// "book": { "type": "uri" , "value": "http://example.org/book/book6" } ,
+// "title": { "type": "literal" , "value": "Harry Potter and the Half-Blood Prince" }
+// } ,
+// {
+// "book": { "type": "sid" , "value":
+// {
+// "s": { "type": "uri" , "value": "<s>" } ,
+// "p": { "type": "uri" , "value": "<p>" } ,
+// "o": { "type": "uri" , "value": "<o>" }
+// }
+// }
+// "title": { "type": "literal" , "value": "Harry Potter and the Deathly Hallows" }
+// } ,
+
+ final BigdataBNode bnode = (BigdataBNode) value;
+ final BigdataStatement stmt = bnode.getStatement();
+ writeKeyValue("type", "sid");
+ writer.write(", ");
+ writeKey("value");
+ openBraces();
+ writeKeyValue("sid-s", stmt.getSubject());
+ writeComma();
+ writeKeyValue("sid-p", stmt.getPredicate());
+ writeComma();
+ writeKeyValue("sid-o", stmt.getObject());
+
+ if (stmt.getContext() != null) {
+ writeComma();
+ writeKeyValue("sid-c", stmt.getContext());
+ }
+ closeBraces();
+
+ }
+ else if (value instanceof BNode) {
+ writeKeyValue("type", "bnode");
+ writer.write(", ");
+ writeKeyValue("value", ((BNode)value).getID());
+ }
+ else if (value instanceof Literal) {
+ Literal lit = (Literal)value;
+
+ if (lit.getDatatype() != null) {
+ writeKeyValue("type", "typed-literal");
+ writer.write(", ");
+ writeKeyValue("datatype", lit.getDatatype().toString());
+ }
+ else {
+ writeKeyValue("type", "literal");
+ if (lit.getLanguage() != null) {
+ writer.write(", ");
+ writeKeyValue("xml:lang", lit.getLanguage());
+ }
+ }
+
+ writer.write(", ");
+ writeKeyValue("value", lit.getLabel());
+ }
+ else {
+ throw new TupleQueryResultHandlerException("Unknown Value object type: " + value.getClass());
+ }
+
+ writer.write(" }");
+ }
+
+
+ public final TupleQueryResultFormat getTupleQueryResultFormat() {
+ return TupleQueryResultFormat.JSON;
+ }
+
+ public void startQueryResult(List<String> columnHeaders)
+ throws TupleQueryResultHandlerException
+ {
+ try {
+ openBraces();
+
+ // Write header
+ writeKey("head");
+ openBraces();
+ writeKeyValue("vars", columnHeaders);
+ closeBraces();
+
+ writeComma();
+
+ // Write results
+ writeKey("results");
+ openBraces();
+
+ writeKey("bindings");
+ openArray();
+
+ firstTupleWritten = false;
+ }
+ catch (IOException e) {
+ throw new TupleQueryResultHandlerException(e);
+ }
+ }
+
+ public void endQueryResult()
+ throws TupleQueryResultHandlerException
+ {
+ try {
+ closeArray(); // bindings array
+ closeBraces(); // results braces
+ closeBraces(); // root braces
+ writer.flush();
+ }
+ catch (IOException e) {
+ throw new TupleQueryResultHandlerException(e);
+ }
+ }
+
+ public void handleSolution(BindingSet bindingSet)
+ throws TupleQueryResultHandlerException
+ {
+ try {
+ if (firstTupleWritten) {
+ writeComma();
+ }
+ else {
+ firstTupleWritten = true;
+ }
+
+ openBraces(); // start of new solution
+
+ Iterator<Binding> bindingIter = bindingSet.iterator();
+ while (bindingIter.hasNext()) {
+ Binding binding = bindingIter.next();
+
+ writeKeyValue(binding.getName(), binding.getValue());
+
+ if (bindingIter.hasNext()) {
+ writeComma();
+ }
+ }
+
+ closeBraces(); // end solution
+
+ writer.flush();
+ }
+ catch (IOException e) {
+ throw new TupleQueryResultHandlerException(e);
+ }
+ }
+
+ private void writeKeyValue(String key, String value)
+ throws IOException
+ {
+ writeKey(key);
+ writeString(value);
+ }
+
+ private void writeKeyValue(String key, Value value)
+ throws IOException, TupleQueryResultHandlerException
+ {
+ writeKey(key);
+ writeValue(value);
+ }
+
+ private void writeKeyValue(String key, Iterable<String> array)
+ throws IOException
+ {
+ writeKey(key);
+ writeArray(array);
+ }
+
+ private void writeKey(String key)
+ throws IOException
+ {
+ writeString(key);
+ writer.write(": ");
+ }
+
+// private void writeValue(Value value)
+// throws IOException, TupleQueryResultHandlerException
+// {
+// writer.write("{ ");
+//
+// if (value instanceof URI) {
+// writeKeyValue("type", "uri");
+// writer.write(", ");
+// writeKeyValue("value", ((URI)value).toString());
+// }
+// else if (value instanceof BNode) {
+// writeKeyValue("type", "bnode");
+// writer.write(", ");
+// writeKeyValue("value", ((BNode)value).getID());
+// }
+// else if (value instanceof Literal) {
+// Literal lit = (Literal)value;
+//
+// if (lit.getDatatype() != null) {
+// writeKeyValue("type", "typed-literal");
+// writer.write(", ");
+// writeKeyValue("datatype", lit.getDatatype().toString());
+// }
+// else {
+// writeKeyValue("type", "literal");
+// if (lit.getLanguage() != null) {
+// writer.write(", ");
+// writeKeyValue("xml:lang", lit.getLanguage());
+// }
+// }
+//
+// writer.write(", ");
+// writeKeyValue("value", lit.getLabel());
+// }
+// else {
+// throw new TupleQueryResultHandlerException("Unknown Value object type: " + value.getClass());
+// }
+//
+// writer.write(" }");
+// }
+
+ private void writeString(String value)
+ throws IOException
+ {
+ // Escape special characters
+ value = StringUtil.gsub("\\", "\\\\", value);
+ value = StringUtil.gsub("\"", "\\\"", value);
+ value = StringUtil.gsub("/", "\\/", value);
+ value = StringUtil.gsub("\b", "\\b", value);
+ value = StringUtil.gsub("\f", "\\f", value);
+ value = StringUtil.gsub("\n", "\\n", value);
+ value = StringUtil.gsub("\r", "\\r", value);
+ value = StringUtil.gsub("\t", "\\t", value);
+
+ writer.write("\"");
+ writer.write(value);
+ writer.write("\"");
+ }
+
+ private void writeArray(Iterable<String> array)
+ throws IOException
+ {
+ writer.write("[ ");
+
+ Iterator<String> iter = array.iterator();
+ while (iter.hasNext()) {
+ String value = iter.next();
+
+ writeString(value);
+
+ if (iter.hasNext()) {
+ writer.write(", ");
+ }
+ }
+
+ writer.write(" ]");
+ }
+
+ private void openArray()
+ throws IOException
+ {
+ writer.write("[");
+ writer.writeEOL();
+ writer.increaseIndentation();
+ }
+
+ private void closeArray()
+ throws IOException
+ {
+ writer.writeEOL();
+ writer.decreaseIndentation();
+ writer.write("]");
+ }
+
+ private void openBraces()
+ throws IOException
+ {
+ writer.write("{");
+ writer.writeEOL();
+ writer.increaseIndentation();
+ }
+
+ private void closeBraces()
+ throws IOException
+ {
+ writer.writeEOL();
+ writer.decreaseIndentation();
+ writer.write("}");
+ }
+
+ private void writeComma()
+ throws IOException
+ {
+ writer.write(", ");
+ writer.writeEOL();
+ }
+}
Property changes on: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java
___________________________________________________________________
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Added: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java (rev 0)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java 2014-03-12 17:48:28 UTC (rev 7943)
@@ -0,0 +1,35 @@
+/*
+ * Copyright Aduna (http://www.aduna-software.com/) (c) 2007.
+ *
+ * Licensed under the Aduna BSD-style license.
+ */
+package com.bigdata.rdf.rio.json;
+
+import java.io.OutputStream;
+
+import org.openrdf.query.resultio.TupleQueryResultFormat;
+import org.openrdf.query.resultio.TupleQueryResultWriter;
+import org.openrdf.query.resultio.TupleQueryResultWriterFactory;
+
+/**
+ * A {@link TupleQueryResultWriterFactory} for writers of SPARQL/JSON query
+ * results.
+ *
+ * @author Arjohn Kampman
+ */
+public class BigdataSPARQLResultsJSONWriterFactory implements TupleQueryResultWriterFactory {
+
+ /**
+ * Returns {@link TupleQueryResultFormat#JSON}.
+ */
+ public TupleQueryResultFormat getTupleQueryResultFormat() {
+ return TupleQueryResultFormat.JSON;
+ }
+
+ /**
+ * Returns a new instance of SPARQLResultsJSONWriter.
+ */
+ public TupleQueryResultWriter getWriter(OutputStream out) {
+ return new BigdataSPARQLResultsJSONWriter(out);
+ }
+}
Property changes on: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java
___________________________________________________________________
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Added: branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory
===================================================================
--- branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory (rev 0)
+++ branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory 2014-03-12 17:48:28 UTC (rev 7943)
@@ -0,0 +1 @@
+com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactory
Modified: branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFParserFactory
===================================================================
--- branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFParserFactory 2014-03-12 17:47:35 UTC (rev 7942)
+++ branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFParserFactory 2014-03-12 17:48:28 UTC (rev 7943)
@@ -1,2 +1,3 @@
-com.bigdata.rdf.rio.nquads.NQuadsParserFactory
-com.bigdata.rdf.rio.rdfxml.BigdataRDFXMLParserFactory
+com.bigdata.rdf.rio.nquads.NQuadsParserFactory
+com.bigdata.rdf.rio.ntriples.BigdataNTriplesParserFactory
+com.bigdata.rdf.rio.turtle.BigdataTurtleParserFactory
Modified: branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory
===================================================================
--- branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory 2014-03-12 17:47:35 UTC (rev 7942)
+++ branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory 2014-03-12 17:48:28 UTC (rev 7943)
@@ -1 +0,0 @@
-com.bigdata.rdf.rio.rdfxml.BigdataRDFXMLWriterFactory
From: <mrp...@us...> - 2014-03-13 00:16:52
Revision: 7949
http://sourceforge.net/p/bigdata/code/7949
Author: mrpersonick
Date: 2014-03-13 00:16:49 +0000 (Thu, 13 Mar 2014)
Log Message:
-----------
Supporting tickets 848 and 849. Added RDR-enabled JSON support for select and construct. Added RDR-enabled TurtleWriter for export.
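A rough usage sketch for the new Turtle export path; the writer and factory classes come from this commit, while the enclosing method and the statement source are assumptions:

    import java.io.Writer;
    import org.openrdf.model.Statement;
    import org.openrdf.rio.RDFWriter;
    import com.bigdata.rdf.rio.turtle.BigdataTurtleWriterFactory;

    // Sketch only: writes statements as Turtle. A statement whose subject or object
    // is a BigdataBNode backed by a statement identifier is serialized with the RDR
    // "<< s, p, o >>" syntax via writeSid() in the diff below.
    void exportTurtle(final Iterable<? extends Statement> stmts, final Writer out) throws Exception {
        final RDFWriter writer = new BigdataTurtleWriterFactory().getWriter(out);
        writer.startRDF();
        for (Statement st : stmts) {
            writer.handleStatement(st);
        }
        writer.endRDF();
    }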
Modified Paths:
--------------
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java
branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory
branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory
Added Paths:
-----------
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForConstruct.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForSelect.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriter.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriterFactory.java
Removed Paths:
-------------
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java 2014-03-13 00:11:05 UTC (rev 7948)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java 2014-03-13 00:16:49 UTC (rev 7949)
@@ -32,15 +32,18 @@
import java.util.ServiceLoader;
import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.resultio.TupleQueryResultParserRegistry;
import org.openrdf.query.resultio.TupleQueryResultWriterRegistry;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParserRegistry;
+import org.openrdf.rio.RDFWriterRegistry;
import com.bigdata.rdf.model.StatementEnum;
-import com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactory;
+import com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactoryForConstruct;
+import com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactoryForSelect;
import com.bigdata.rdf.rio.ntriples.BigdataNTriplesParserFactory;
+import com.bigdata.rdf.rio.rdfxml.BigdataRDFXMLWriterFactory;
import com.bigdata.rdf.rio.turtle.BigdataTurtleParserFactory;
+import com.bigdata.rdf.rio.turtle.BigdataTurtleWriterFactory;
/**
* This static class provides a hook which allows the replacement of services
@@ -125,19 +128,23 @@
final TupleQueryResultWriterRegistry r = TupleQueryResultWriterRegistry.getInstance();
// add our custom RDR-enabled JSON writer
- r.add(new BigdataSPARQLResultsJSONWriterFactory());
+ r.add(new BigdataSPARQLResultsJSONWriterFactoryForSelect());
}
-// // Ditto, but for the writer.
-// {
-// final RDFWriterRegistry r = RDFWriterRegistry.getInstance();
-//
+ // Ditto, but for the writer.
+ {
+ final RDFWriterRegistry r = RDFWriterRegistry.getInstance();
+
// r.add(new BigdataRDFXMLWriterFactory());
-//
-// }
+
+ r.add(new BigdataTurtleWriterFactory());
+ r.add(new BigdataSPARQLResultsJSONWriterFactoryForConstruct());
+
+ }
+
// {
// final PropertiesParserRegistry r = PropertiesParserRegistry.getInstance();
//
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java 2014-03-13 00:11:05 UTC (rev 7948)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriter.java 2014-03-13 00:16:49 UTC (rev 7949)
@@ -14,11 +14,13 @@
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
+import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.openrdf.model.BNode;
import org.openrdf.model.Literal;
+import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.query.Binding;
@@ -26,6 +28,9 @@
import org.openrdf.query.TupleQueryResultHandlerException;
import org.openrdf.query.resultio.TupleQueryResultFormat;
import org.openrdf.query.resultio.TupleQueryResultWriter;
+import org.openrdf.rio.RDFFormat;
+import org.openrdf.rio.RDFHandlerException;
+import org.openrdf.rio.RDFWriter;
import com.bigdata.rdf.model.BigdataBNode;
import com.bigdata.rdf.model.BigdataStatement;
@@ -35,7 +40,7 @@
* href="http://www.w3.org/TR/rdf-sparql-json-res/">SPARQL Query Results JSON
* Format</a>.
*/
-public class BigdataSPARQLResultsJSONWriter implements TupleQueryResultWriter {
+public class BigdataSPARQLResultsJSONWriter implements TupleQueryResultWriter, RDFWriter {
/*-----------*
* Variables *
@@ -50,7 +55,10 @@
*--------------*/
public BigdataSPARQLResultsJSONWriter(OutputStream out) {
- Writer w = new OutputStreamWriter(out, Charset.forName("UTF-8"));
+ this(new OutputStreamWriter(out, Charset.forName("UTF-8")));
+ }
+
+ public BigdataSPARQLResultsJSONWriter(Writer w) {
w = new BufferedWriter(w, 1024);
writer = new IndentingWriter(w);
}
@@ -148,6 +156,20 @@
return TupleQueryResultFormat.JSON;
}
+ public void startRDF() {
+
+ try {
+
+ startQueryResult(Arrays.asList(new String[] {
+ "s", "p", "o", "c"
+ }));
+
+ } catch (TupleQueryResultHandlerException e) {
+ throw new RuntimeException(e);
+ }
+
+ }
+
public void startQueryResult(List<String> columnHeaders)
throws TupleQueryResultHandlerException
{
@@ -176,6 +198,18 @@
}
}
+ public void endRDF() {
+
+ try {
+
+ endQueryResult();
+
+ } catch (TupleQueryResultHandlerException e) {
+ throw new RuntimeException(e);
+ }
+
+ }
+
public void endQueryResult()
throws TupleQueryResultHandlerException
{
@@ -190,6 +224,51 @@
}
}
+ public void handleStatement(final Statement stmt)
+ {
+ try {
+ if (firstTupleWritten) {
+ writeComma();
+ }
+ else {
+ firstTupleWritten = true;
+ }
+
+ openBraces(); // start of new solution
+
+ writeKeyValue("s", stmt.getSubject());
+ writeComma();
+ writeKeyValue("p", stmt.getPredicate());
+ writeComma();
+ writeKeyValue("o", stmt.getObject());
+ if (stmt.getContext() != null) {
+ writeComma();
+ writeKeyValue("c", stmt.getContext());
+ }
+
+// Iterator<Binding> bindingIter = bindingSet.iterator();
+// while (bindingIter.hasNext()) {
+// Binding binding = bindingIter.next();
+//
+// writeKeyValue(binding.getName(), binding.getValue());
+//
+// if (bindingIter.hasNext()) {
+// writeComma();
+// }
+// }
+
+ closeBraces(); // end solution
+
+ writer.flush();
+ }
+ catch (TupleQueryResultHandlerException e) {
+ throw new RuntimeException(e);
+ }
+ catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
public void handleSolution(BindingSet bindingSet)
throws TupleQueryResultHandlerException
{
@@ -367,4 +446,24 @@
writer.write(", ");
writer.writeEOL();
}
+
+ @Override
+ public void handleComment(String arg0) throws RDFHandlerException {
+ // TODO Implement me
+
+ }
+
+ @Override
+ public void handleNamespace(String arg0, String arg1)
+ throws RDFHandlerException {
+ // TODO Implement me
+
+ }
+
+ @Override
+ public RDFFormat getRDFFormat() {
+
+ return null;
+
+ }
}
Deleted: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java 2014-03-13 00:11:05 UTC (rev 7948)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java 2014-03-13 00:16:49 UTC (rev 7949)
@@ -1,35 +0,0 @@
-/*
- * Copyright Aduna (http://www.aduna-software.com/) (c) 2007.
- *
- * Licensed under the Aduna BSD-style license.
- */
-package com.bigdata.rdf.rio.json;
-
-import java.io.OutputStream;
-
-import org.openrdf.query.resultio.TupleQueryResultFormat;
-import org.openrdf.query.resultio.TupleQueryResultWriter;
-import org.openrdf.query.resultio.TupleQueryResultWriterFactory;
-
-/**
- * A {@link TupleQueryResultWriterFactory} for writers of SPARQL/JSON query
- * results.
- *
- * @author Arjohn Kampman
- */
-public class BigdataSPARQLResultsJSONWriterFactory implements TupleQueryResultWriterFactory {
-
- /**
- * Returns {@link TupleQueryResultFormat#JSON}.
- */
- public TupleQueryResultFormat getTupleQueryResultFormat() {
- return TupleQueryResultFormat.JSON;
- }
-
- /**
- * Returns a new instance of SPARQLResultsJSONWriter.
- */
- public TupleQueryResultWriter getWriter(OutputStream out) {
- return new BigdataSPARQLResultsJSONWriter(out);
- }
-}
Added: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForConstruct.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForConstruct.java (rev 0)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForConstruct.java 2014-03-13 00:16:49 UTC (rev 7949)
@@ -0,0 +1,61 @@
+/*
+ * Copyright Aduna (http://www.aduna-software.com/) (c) 2007.
+ *
+ * Licensed under the Aduna BSD-style license.
+ */
+package com.bigdata.rdf.rio.json;
+
+import java.io.OutputStream;
+import java.io.Writer;
+import java.nio.charset.Charset;
+
+import org.openrdf.query.resultio.TupleQueryResultFormat;
+import org.openrdf.query.resultio.TupleQueryResultWriterFactory;
+import org.openrdf.rio.RDFFormat;
+import org.openrdf.rio.RDFWriter;
+import org.openrdf.rio.RDFWriterFactory;
+
+/**
+ * A {@link TupleQueryResultWriterFactory} for writers of SPARQL/JSON query
+ * results.
+ *
+ * @author Arjohn Kampman
+ */
+public class BigdataSPARQLResultsJSONWriterFactoryForConstruct implements RDFWriterFactory {
+
+ public static final RDFFormat JSON = new RDFFormat(
+ "JSON", // name
+ "application/sparql-results+json", // mime-type
+ Charset.forName("UTF-8"), // charset
+ "json", // file extension
+ false, // supports namespaces
+ true // supports contexts
+ );
+
+ static {
+
+ RDFFormat.register(JSON);
+
+ }
+
+ /**
+ * Returns {@link TupleQueryResultFormat#JSON}.
+ */
+ public RDFFormat getRDFFormat() {
+ return JSON;
+ }
+
+ /**
+ * Returns a new instance of SPARQLResultsJSONWriter.
+ */
+ public RDFWriter getWriter(OutputStream out) {
+ return new BigdataSPARQLResultsJSONWriter(out);
+ }
+
+ /**
+ * Returns a new instance of SPARQLResultsJSONWriter.
+ */
+ public RDFWriter getWriter(Writer writer) {
+ return new BigdataSPARQLResultsJSONWriter(writer);
+ }
+}
Property changes on: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForConstruct.java
___________________________________________________________________
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Copied: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForSelect.java (from rev 7946, branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactory.java)
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForSelect.java (rev 0)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForSelect.java 2014-03-13 00:16:49 UTC (rev 7949)
@@ -0,0 +1,35 @@
+/*
+ * Copyright Aduna (http://www.aduna-software.com/) (c) 2007.
+ *
+ * Licensed under the Aduna BSD-style license.
+ */
+package com.bigdata.rdf.rio.json;
+
+import java.io.OutputStream;
+
+import org.openrdf.query.resultio.TupleQueryResultFormat;
+import org.openrdf.query.resultio.TupleQueryResultWriter;
+import org.openrdf.query.resultio.TupleQueryResultWriterFactory;
+
+/**
+ * A {@link TupleQueryResultWriterFactory} for writers of SPARQL/JSON query
+ * results.
+ *
+ * @author Arjohn Kampman
+ */
+public class BigdataSPARQLResultsJSONWriterFactoryForSelect implements TupleQueryResultWriterFactory {
+
+ /**
+ * Returns {@link TupleQueryResultFormat#JSON}.
+ */
+ public TupleQueryResultFormat getTupleQueryResultFormat() {
+ return TupleQueryResultFormat.JSON;
+ }
+
+ /**
+ * Returns a new instance of SPARQLResultsJSONWriter.
+ */
+ public TupleQueryResultWriter getWriter(OutputStream out) {
+ return new BigdataSPARQLResultsJSONWriter(out);
+ }
+}
Property changes on: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/BigdataSPARQLResultsJSONWriterFactoryForSelect.java
___________________________________________________________________
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Added: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriter.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriter.java (rev 0)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriter.java 2014-03-13 00:16:49 UTC (rev 7949)
@@ -0,0 +1,410 @@
+/*
+ * Copyright Aduna (http://www.aduna-software.com/) (c) 1997-2007.
+ *
+ * Licensed under the Aduna BSD-style license.
+ */
+package com.bigdata.rdf.rio.turtle;
+
+import info.aduna.io.IndentingWriter;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.util.Map;
+
+import org.openrdf.model.BNode;
+import org.openrdf.model.Literal;
+import org.openrdf.model.Resource;
+import org.openrdf.model.URI;
+import org.openrdf.rio.RDFWriter;
+import org.openrdf.rio.turtle.TurtleUtil;
+import org.openrdf.rio.turtle.TurtleWriter;
+
+import com.bigdata.rdf.model.BigdataBNode;
+import com.bigdata.rdf.model.BigdataStatement;
+
+/**
+ * An implementation of the RDFWriter interface that writes RDF documents in
+ * Turtle format. The Turtle format is defined in <a
+ * href="http://www.dajobe.org/2004/01/turtle/">in this document</a>.
+ */
+public class BigdataTurtleWriter extends TurtleWriter implements RDFWriter {
+
+ /*-----------*
+ * Variables *
+ *-----------*/
+
+// protected IndentingWriter writer;
+//
+// /**
+// * Table mapping namespace names (key) to namespace prefixes (value).
+// */
+// protected Map<String, String> namespaceTable;
+//
+// protected boolean writingStarted;
+//
+// /**
+// * Flag indicating whether the last written statement has been closed.
+// */
+// protected boolean statementClosed;
+//
+// protected Resource lastWrittenSubject;
+//
+// protected URI lastWrittenPredicate;
+
+ /*--------------*
+ * Constructors *
+ *--------------*/
+
+ /**
+ * Creates a new TurtleWriter that will write to the supplied OutputStream.
+ *
+ * @param out
+ * The OutputStream to write the Turtle document to.
+ */
+ public BigdataTurtleWriter(OutputStream out) {
+ super(out);
+ }
+
+ /**
+ * Creates a new TurtleWriter that will write to the supplied Writer.
+ *
+ * @param writer
+ * The Writer to write the Turtle document to.
+ */
+ public BigdataTurtleWriter(Writer writer) {
+ super(writer);
+ }
+
+ /*---------*
+ * Methods *
+ *---------*/
+
+// public RDFFormat getRDFFormat() {
+// return RDFFormat.TURTLE;
+// }
+//
+// public void startRDF()
+// throws RDFHandlerException
+// {
+// if (writingStarted) {
+// throw new RuntimeException("Document writing has already started");
+// }
+//
+// writingStarted = true;
+//
+// try {
+// // Write namespace declarations
+// for (Map.Entry<String, String> entry : namespaceTable.entrySet()) {
+// String name = entry.getKey();
+// String prefix = entry.getValue();
+//
+// writeNamespace(prefix, name);
+// }
+//
+// if (!namespaceTable.isEmpty()) {
+// writer.writeEOL();
+// }
+// }
+// catch (IOException e) {
+// throw new RDFHandlerException(e);
+// }
+// }
+//
+// public void endRDF()
+// throws RDFHandlerException
+// {
+// if (!writingStarted) {
+// throw new RuntimeException("Document writing has not yet started");
+// }
+//
+// try {
+// closePreviousStatement();
+// writer.flush();
+// }
+// catch (IOException e) {
+// throw new RDFHandlerException(e);
+// }
+// finally {
+// writingStarted = false;
+// }
+// }
+//
+// public void handleNamespace(String prefix, String name)
+// throws RDFHandlerException
+// {
+// try {
+// if (!namespaceTable.containsKey(name)) {
+// // Namespace not yet mapped to a prefix, try to give it the
+// // specified prefix
+//
+// boolean isLegalPrefix = prefix.length() == 0 || TurtleUtil.isLegalPrefix(prefix);
+//
+// if (!isLegalPrefix || namespaceTable.containsValue(prefix)) {
+// // Specified prefix is not legal or the prefix is already in use,
+// // generate a legal unique prefix
+//
+// if (prefix.length() == 0 || !isLegalPrefix) {
+// prefix = "ns";
+// }
+//
+// int number = 1;
+//
+// while (namespaceTable.containsValue(prefix + number)) {
+// number++;
+// }
+//
+// prefix += number;
+// }
+//
+// namespaceTable.put(name, prefix);
+//
+// if (writingStarted) {
+// closePreviousStatement();
+//
+// writeNamespace(prefix, name);
+// }
+// }
+// }
+// catch (IOException e) {
+// throw new RDFHandlerException(e);
+// }
+// }
+//
+// public void handleStatement(Statement st)
+// throws RDFHandlerException
+// {
+// if (!writingStarted) {
+// throw new RuntimeException("Document writing has not yet been started");
+// }
+//
+// Resource subj = st.getSubject();
+// URI pred = st.getPredicate();
+// Value obj = st.getObject();
+//
+// try {
+// if (subj.equals(lastWrittenSubject)) {
+// if (pred.equals(lastWrittenPredicate)) {
+// // Identical subject and predicate
+// writer.write(" , ");
+// }
+// else {
+// // Identical subject, new predicate
+// writer.write(" ;");
+// writer.writeEOL();
+//
+// // Write new predicate
+// writePredicate(pred);
+// writer.write(" ");
+// lastWrittenPredicate = pred;
+// }
+// }
+// else {
+// // New subject
+// closePreviousStatement();
+//
+// // Write new subject:
+// writer.writeEOL();
+// writeResource(subj);
+// writer.write(" ");
+// lastWrittenSubject = subj;
+//
+// // Write new predicate
+// writePredicate(pred);
+// writer.write(" ");
+// lastWrittenPredicate = pred;
+//
+// statementClosed = false;
+// writer.increaseIndentation();
+// }
+//
+// writeValue(obj);
+//
+// // Don't close the line just yet. Maybe the next
+// // statement has the same subject and/or predicate.
+// }
+// catch (IOException e) {
+// throw new RDFHandlerException(e);
+// }
+// }
+//
+// public void handleComment(String comment)
+// throws RDFHandlerException
+// {
+// try {
+// closePreviousStatement();
+//
+// if (comment.indexOf('\r') != -1 || comment.indexOf('\n') != -1) {
+// // Comment is not allowed to contain newlines or line feeds.
+// // Split comment in individual lines and write comment lines
+// // for each of them.
+// StringTokenizer st = new StringTokenizer(comment, "\r\n");
+// while (st.hasMoreTokens()) {
+// writeCommentLine(st.nextToken());
+// }
+// }
+// else {
+// writeCommentLine(comment);
+// }
+// }
+// catch (IOException e) {
+// throw new RDFHandlerException(e);
+// }
+// }
+//
+// protected void writeCommentLine(String line)
+// throws IOException
+// {
+// writer.write("# ");
+// writer.write(line);
+// writer.writeEOL();
+// }
+//
+// protected void writeNamespace(String prefix, String name)
+// throws IOException
+// {
+// writer.write("@prefix ");
+// writer.write(prefix);
+// writer.write(": <");
+// writer.write(TurtleUtil.encodeURIString(name));
+// writer.write("> .");
+// writer.writeEOL();
+// }
+//
+// protected void writePredicate(URI predicate)
+// throws IOException
+// {
+// if (predicate.equals(RDF.TYPE)) {
+// // Write short-cut for rdf:type
+// writer.write("a");
+// }
+// else {
+// writeURI(predicate);
+// }
+// }
+//
+// protected void writeValue(Value val)
+// throws IOException
+// {
+// if (val instanceof Resource) {
+// writeResource((Resource)val);
+// }
+// else {
+// writeLiteral((Literal)val);
+// }
+// }
+//
+// protected void writeResource(Resource res)
+// throws IOException
+// {
+// if (res instanceof URI) {
+// writeURI((URI)res);
+// }
+// else {
+// writeBNode((BNode)res);
+// }
+// }
+//
+// protected void writeURI(URI uri)
+// throws IOException
+// {
+// String uriString = uri.toString();
+//
+// // Try to find a prefix for the URI's namespace
+// String prefix = null;
+//
+// int splitIdx = TurtleUtil.findURISplitIndex(uriString);
+// if (splitIdx > 0) {
+// String namespace = uriString.substring(0, splitIdx);
+// prefix = namespaceTable.get(namespace);
+// }
+//
+// if (prefix != null) {
+// // Namespace is mapped to a prefix; write abbreviated URI
+// writer.write(prefix);
+// writer.write(":");
+// writer.write(uriString.substring(splitIdx));
+// }
+// else {
+// // Write full URI
+// writer.write("<");
+// writer.write(TurtleUtil.encodeURIString(uriString));
+// writer.write(">");
+// }
+// }
+
+ protected void writeBNode(BNode bNode)
+ throws IOException
+ {
+ if (bNode instanceof BigdataBNode &&
+ ((BigdataBNode) bNode).isStatementIdentifier()) {
+ writeSid((BigdataBNode) bNode);
+ } else {
+ writer.write("_:");
+ writer.write(bNode.getID());
+ }
+ }
+
+ protected void writeSid(final BigdataBNode sid)
+ throws IOException
+ {
+ final BigdataStatement stmt = sid.getStatement();
+ writer.write("<< ");
+ writeValue(stmt.getSubject());
+ writer.write(", ");
+ writeValue(stmt.getPredicate());
+ writer.write(", ");
+ writeValue(stmt.getObject());
+ if (stmt.getContext() != null) {
+ writer.write(", ");
+ writeValue(stmt.getContext());
+ }
+ writer.write(" >>");
+ }
+
+// protected void writeLiteral(Literal lit)
+// throws IOException
+// {
+// String label = lit.getLabel();
+//
+// if (label.indexOf('\n') > 0 || label.indexOf('\r') > 0 || label.indexOf('\t') > 0) {
+// // Write label as long string
+// writer.write("\"\"\"");
+// writer.write(TurtleUtil.encodeLongString(label));
+// writer.write("\"\"\"");
+// }
+// else {
+// // Write label as normal string
+// writer.write("\"");
+// writer.write(TurtleUtil.encodeString(label));
+// writer.write("\"");
+// }
+//
+// if (lit.getDatatype() != null) {
+// // Append the literal's datatype (possibly written as an abbreviated
+// // URI)
+// writer.write("^^");
+// writeURI(lit.getDatatype());
+// }
+// else if (lit.getLanguage() != null) {
+// // Append the literal's language
+// writer.write("@");
+// writer.write(lit.getLanguage());
+// }
+// }
+//
+// protected void closePreviousStatement()
+// throws IOException
+// {
+// if (!statementClosed) {
+// // The previous statement still needs to be closed:
+// writer.write(" .");
+// writer.writeEOL();
+// writer.decreaseIndentation();
+//
+// statementClosed = true;
+// lastWrittenSubject = null;
+// lastWrittenPredicate = null;
+// }
+// }
+}
Property changes on: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriter.java
___________________________________________________________________
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Added: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriterFactory.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriterFactory.java (rev 0)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriterFactory.java 2014-03-13 00:16:49 UTC (rev 7949)
@@ -0,0 +1,42 @@
+/*
+ * Copyright Aduna (http://www.aduna-software.com/) (c) 2007.
+ *
+ * Licensed under the Aduna BSD-style license.
+ */
+package com.bigdata.rdf.rio.turtle;
+
+import java.io.OutputStream;
+import java.io.Writer;
+
+import org.openrdf.rio.RDFFormat;
+import org.openrdf.rio.RDFWriter;
+import org.openrdf.rio.RDFWriterFactory;
+
+/**
+ * An {@link RDFWriterFactory} for Turtle writers.
+ *
+ * @author Arjohn Kampman
+ */
+public class BigdataTurtleWriterFactory implements RDFWriterFactory {
+
+ /**
+ * Returns {@link RDFFormat#TURTLE}.
+ */
+ public RDFFormat getRDFFormat() {
+ return RDFFormat.TURTLE;
+ }
+
+ /**
+ * Returns a new instance of {@link BigdataTurtleWriter}.
+ */
+ public RDFWriter getWriter(OutputStream out) {
+ return new BigdataTurtleWriter(out);
+ }
+
+ /**
+ * Returns a new instance of {@link BigdataTurtleWriter}.
+ */
+ public RDFWriter getWriter(Writer writer) {
+ return new BigdataTurtleWriter(writer);
+ }
+}
Property changes on: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleWriterFactory.java
___________________________________________________________________
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Modified: branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory
===================================================================
--- branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory 2014-03-13 00:11:05 UTC (rev 7948)
+++ branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.query.resultio.TupleQueryResultWriterFactory 2014-03-13 00:16:49 UTC (rev 7949)
@@ -1 +1 @@
-com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactory
+com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactoryForSelect
Modified: branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory
===================================================================
--- branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory 2014-03-13 00:11:05 UTC (rev 7948)
+++ branches/RDR/bigdata-rdf/src/resources/service-providers/META-INF/services/org.openrdf.rio.RDFWriterFactory 2014-03-13 00:16:49 UTC (rev 7949)
@@ -0,0 +1,2 @@
+com.bigdata.rdf.rio.turtle.BigdataTurtleWriterFactory
+com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriterFactoryForConstruct
\ No newline at end of file