improved SPARQLStore BNode customizability
query() (for initBindings), contexts(), addN(), remove(), add_graph() and remove_graph() now call
node_to_sparql(). Some support for BNode graph names has been added.

Add-on for #513, see also #511, #512
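A rough usage sketch of the customization point this commit wires through: it assumes, per #513, that the store constructors accept a node_to_sparql callable (this commit only touches the call sites); the endpoint URLs and the my_bnode_ext helper are illustrative, not part of the change.

from rdflib import BNode
from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore

def my_bnode_ext(node):
    # Illustrative override: serialize BNodes as skolem-style IRIs
    # instead of failing, so they survive the trip to the endpoint.
    if isinstance(node, BNode):
        return '<bnode:b%s>' % node
    return node.n3()

# Placeholder endpoints; node_to_sparql is the constructor hook assumed from #513.
store = SPARQLUpdateStore('http://localhost:3030/db/query',
                          'http://localhost:3030/db/update',
                          node_to_sparql=my_bnode_ext)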
joernhees committed Mar 9, 2016
1 parent 712c3fc commit 77e9b70
Showing 1 changed file with 31 additions and 15 deletions.
46 changes: 31 additions & 15 deletions rdflib/plugins/stores/sparqlstore.py
@@ -43,6 +43,7 @@
from rdflib.query import Result
from rdflib import Variable, Namespace, BNode, URIRef, Literal
from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
+from rdflib.term import Node

import httplib
import urlparse
@@ -318,7 +319,7 @@ def query(self, query,
# VALUES was added to SPARQL 1.1 on 2012/07/24
query += "\nVALUES ( %s )\n{ ( %s ) }\n"\
                % (" ".join("?" + str(x) for x in v),
-                  " ".join(initBindings[x].n3() for x in v))
+                  " ".join(self.node_to_sparql(initBindings[x]) for x in v))

self.resetQuery()
if self._is_contextual(queryGraph):
@@ -386,7 +387,7 @@ def triples(self, (s, p, o), context=None):

# The ORDER BY is necessary
if hasattr(context, LIMIT) or hasattr(context, OFFSET) \
-            or hasattr(context, ORDERBY):
+                or hasattr(context, ORDERBY):
var = None
if isinstance(s, Variable):
var = s
@@ -472,10 +473,11 @@ def contexts(self, triple=None):
self.resetQuery()

if triple:
+            nts = self.node_to_sparql
s, p, o = triple
-            params = ((s if s else Variable('s')).n3(),
-                      (p if p else Variable('p')).n3(),
-                      (o if o else Variable('o')).n3())
+            params = (nts(s if s else Variable('s')),
+                      nts(p if p else Variable('p')),
+                      nts(o if o else Variable('o')))
self.setQuery('SELECT ?name WHERE { GRAPH ?name { %s %s %s }}' % params)
else:
self.setQuery('SELECT ?name WHERE { GRAPH ?name {} }')
@@ -695,7 +697,7 @@ def add(self, spo, context=None, quoted=False):
triple = "%s %s %s ." % (nts(subject), nts(predicate), nts(obj))
if self._is_contextual(context):
q = "INSERT DATA { GRAPH %s { %s } }" % (
-                context.identifier.n3(), triple)
+                nts(context.identifier), triple)
else:
q = "INSERT DATA { %s }" % triple
self._transaction().append(q)
@@ -711,9 +713,15 @@ def addN(self, quads):
for subject, predicate, obj, context in quads:
contexts[context].append((subject,predicate,obj))
data = []
+        nts = self.node_to_sparql
for context in contexts:
-            triples = ["%s %s %s ." % (x[0].n3(), x[1].n3(), x[2].n3()) for x in contexts[context]]
-            data.append("INSERT DATA { GRAPH <%s> { %s } }\n" % (context.identifier, '\n'.join(triples)))
+            triples = [
+                "%s %s %s ." % (
+                    nts(subject), nts(predicate), nts(obj)
+                ) for subject, predicate, obj in contexts[context]
+            ]
+            data.append("INSERT DATA { GRAPH %s { %s } }\n" % (
+                nts(context.identifier), '\n'.join(triples)))
self._transaction().extend(data)
if self.autocommit:
self.commit()
@@ -731,11 +739,13 @@ def remove(self, spo, context):
if not obj:
obj = Variable("O")

-        triple = "%s %s %s ." % (subject.n3(), predicate.n3(), obj.n3())
+        nts = self.node_to_sparql
+        triple = "%s %s %s ." % (nts(subject), nts(predicate), nts(obj))
if self._is_contextual(context):
+            cid = nts(context.identifier)
q = "DELETE { GRAPH %s { %s } } WHERE { GRAPH %s { %s } }" % (
-                context.identifier.n3(), triple,
-                context.identifier.n3(), triple)
+                cid, triple,
+                cid, triple)
else:
q = "DELETE { %s } WHERE { %s } " % (triple, triple)
self._transaction().append(q)
@@ -816,7 +826,7 @@ def update(self, query,
v = list(initBindings)
values = "\nVALUES ( %s )\n{ ( %s ) }\n"\
                % (" ".join("?" + str(x) for x in v),
-                  " ".join(initBindings[x].n3() for x in v))
+                  " ".join(self.node_to_sparql(initBindings[x]) for x in v))

query = self.where_pattern.sub("WHERE { " + values, query)

@@ -832,7 +842,11 @@ def _insert_named_graph(self, query, query_graph):
is converted into
"INSERT DATA { GRAPH <urn:graph> { <urn:michel> <urn:likes> <urn:pizza> } }"
"""
-        graph_block_open = " GRAPH <%s> {" % query_graph
+        if isinstance(query_graph, Node):
+            query_graph = self.node_to_sparql(query_graph)
+        else:
+            query_graph = '<%s>' % query_graph
+        graph_block_open = " GRAPH %s {" % query_graph
graph_block_close = "} "

# SPARQL Update supports the following operations:
@@ -879,12 +893,14 @@ def add_graph(self, graph):
if not self.graph_aware:
Store.add_graph(self, graph)
elif graph.identifier != DATASET_DEFAULT_GRAPH_ID:
-            self.update("CREATE GRAPH <%s>" % graph.identifier)
+            self.update(
+                "CREATE GRAPH %s" % self.node_to_sparql(graph.identifier))

def remove_graph(self, graph):
if not self.graph_aware:
Store.remove_graph(self, graph)
elif graph.identifier == DATASET_DEFAULT_GRAPH_ID:
self.update("DROP DEFAULT")
else:
-            self.update("DROP GRAPH <%s>" % graph.identifier)
+            self.update(
+                "DROP GRAPH %s" % self.node_to_sparql(graph.identifier))

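A hedged end-to-end sketch of the graph-aware paths touched above (add_graph, add, remove_graph): the Dataset wiring is standard rdflib, the endpoints are placeholders, my_bnode_ext is an illustrative helper, and the node_to_sparql constructor argument is assumed from #513.

from rdflib import Dataset, BNode, URIRef, Literal
from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore

def my_bnode_ext(node):
    # Illustrative helper: make BNodes serializable for the endpoint.
    return '<bnode:b%s>' % node if isinstance(node, BNode) else node.n3()

# Placeholder endpoints; node_to_sparql is the hook assumed from #513.
store = SPARQLUpdateStore('http://localhost:3030/db/query',
                          'http://localhost:3030/db/update',
                          node_to_sparql=my_bnode_ext)
ds = Dataset(store=store)

g = ds.graph(BNode())  # add_graph(): issues CREATE GRAPH with the custom BNode serialization
g.add((URIRef('urn:michel'), URIRef('urn:likes'), Literal('pizza')))  # add(): INSERT DATA { GRAPH ... { ... } }
ds.remove_graph(g)     # remove_graph(): issues DROP GRAPH for the same graph name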