Route terms to graphs; many more edits.

Stefano Cossu, 7 years ago
Parent commit: adb7f9009e

+ 1 - 0
lakesuperior/dictionaries/namespaces.py

@@ -23,6 +23,7 @@ core_namespaces = {
     'fcres' : Namespace('info:fcres/'),
     'fcmeta' : Namespace('info:fcmeta/'),
     'fcstate' : Namespace('info:fcstate/'),
+    'fcstruct' : Namespace('info:fcstruct/'),
     'fcsystem' : Namespace('info:fcsystem/'),
     'webac' : Namespace('http://www.w3.org/ns/auth/acl#'),
     'xml' : Namespace('http://www.w3.org/XML/1998/namespace'),
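
The new `fcstruct` namespace gives structural triples (parent and containment links) their own graph family. As an aside (not part of the commit), an rdflib `Namespace` can be indexed with a resource UID to mint the corresponding graph URI, which is how the routing code later in this commit uses these prefixes; the UID below is hypothetical:

    from rdflib import Namespace

    fcstruct = Namespace('info:fcstruct/')

    # Indexing the namespace with a resource UID yields the URI of that
    # resource's structure graph.
    print(fcstruct['a/b/c'])  # -> info:fcstruct/a/b/c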

+ 15 - 16
lakesuperior/endpoints/ldp.py

@@ -403,7 +403,7 @@ def tombstone(uid):
     logger.debug('Deleting tombstone for {}.'.format(uid))
     rsrc = Ldpr(uid)
     try:
-        imr = rsrc.imr
+        metadata = rsrc.metadata
     except TombstoneError as e:
         if request.method == 'DELETE':
             if e.uid == uid:
@@ -426,7 +426,7 @@ def tombstone(uid):
         return '', 404
 
 
-def uuid_for_post(parent_uuid=None, slug=None):
+def uuid_for_post(parent_uid=None, slug=None):
     '''
     Validate conditions to perform a POST and return an LDP resource
     UID for using with the `post` method.
@@ -440,37 +440,36 @@ def uuid_for_post(parent_uuid=None, slug=None):
         return uid
 
     # Shortcut!
-    if not slug and not parent_uuid:
+    if not slug and not parent_uid:
         uid = split_if_legacy(str(uuid4()))
 
         return uid
 
-    parent = LdpFactory.from_stored(parent_uuid, repr_opts={'incl_children' : False})
+    parent = LdpFactory.from_stored(parent_uid,
+            repr_opts={'incl_children' : False})
 
     if nsc['fcrepo'].Pairtree in parent.types:
         raise InvalidResourceError(parent.uid,
                 'Resources cannot be created under a pairtree.')
 
     # Set prefix.
-    if parent_uuid:
-        parent_types = { t.identifier for t in \
-                parent.imr.objects(RDF.type) }
-        logger.debug('Parent types: {}'.format(pformat(parent_types)))
-        if nsc['ldp'].Container not in parent_types:
-            raise InvalidResourceError('Parent {} is not a container.'
-                   .format(parent_uuid))
-
-        pfx = parent_uuid + '/'
+    if parent_uid:
+        logger.debug('Parent types: {}'.format(pformat(parent.types)))
+        if nsc['ldp'].Container not in parent.types:
+            raise InvalidResourceError(parent_uid,
+                    'Parent {} is not a container.'.format(parent_uid))
+
+        pfx = parent_uid + '/'
     else:
         pfx = ''
 
     # Create candidate UID and validate.
     if slug:
-        cnd_uuid = pfx + slug
-        if current_app.rdfly.ask_rsrc_exists(nsc['fcres'][cnd_uuid]):
+        cnd_uid = pfx + slug
+        if current_app.rdfly.ask_rsrc_exists(nsc['fcres'][cnd_uid]):
             uid = pfx + split_if_legacy(str(uuid4()))
         else:
-            uid = cnd_uuid
+            uid = cnd_uid
     else:
         uid = pfx + split_if_legacy(str(uuid4()))
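
As an illustration (not part of the commit), the candidate-UID logic in `uuid_for_post` above can be read as the following sketch, where `exists` is a stand-in for `current_app.rdfly.ask_rsrc_exists` and legacy pairtree splitting (`split_if_legacy`) is omitted:

    from uuid import uuid4

    def candidate_uid(parent_uid=None, slug=None, exists=lambda uid: False):
        # `exists` stands in for the repository existence check; a slug is
        # only honored if the resulting UID is free, otherwise a random
        # UUID is minted under the same prefix.
        pfx = parent_uid + '/' if parent_uid else ''
        if slug and not exists(pfx + slug):
            return pfx + slug
        return pfx + str(uuid4())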
 

+ 4 - 4
lakesuperior/model/ldp_rs.py

@@ -48,8 +48,8 @@ class LdpRs(Ldpr):
         '''
         local_update_str = g.tbox.localize_ext_str(update_str, self.urn)
         delta = self._sparql_delta(local_update_str)
-        self._ensure_single_subject_rdf(delta[0], add_fragment=False)
-        self._ensure_single_subject_rdf(delta[1])
+        #self._ensure_single_subject_rdf(delta[0], add_fragment=False)
+        #self._ensure_single_subject_rdf(delta[1])
 
         return self._modify_rsrc(self.RES_UPDATED, *delta)
 
@@ -85,9 +85,9 @@ class LdpRs(Ldpr):
 
         remove_gr, add_gr = self._dedup_deltas(pre_gr, post_gr)
 
-        #self._logger.info('Removing: {}'.format(
+        #self._logger.debug('Removing: {}'.format(
         #    remove_gr.serialize(format='turtle').decode('utf8')))
-        #self._logger.info('Adding: {}'.format(
+        #self._logger.debug('Adding: {}'.format(
         #    add_gr.serialize(format='turtle').decode('utf8')))
 
         remove_gr = self._check_mgd_terms(remove_gr)
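
`_dedup_deltas` is not shown in this diff; a plausible reading, assuming it simply drops triples common to the pre- and post-update graphs so they are neither removed nor re-added, would be the following sketch:

    def dedup_deltas(pre_gr, post_gr):
        # Hypothetical reading of _dedup_deltas: triples present in both the
        # pre- and post-update graphs need no change, so only the symmetric
        # difference is returned as (remove, add) triple sets.
        remove_trp = set(pre_gr) - set(post_gr)
        add_trp = set(post_gr) - set(pre_gr)
        return remove_trp, add_trp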

+ 57 - 63
lakesuperior/model/ldpr.py

@@ -45,7 +45,8 @@ def atomic(fn):
         else:
             self._logger.info('Committing transaction.')
             if hasattr(self.rdfly.store, '_edits'):
-                self.rdfly.optimize_edits()
+                # @FIXME ugly.
+                self.rdfly._conn.optimize_edits()
             self.rdfly.store.commit()
             for ev in request.changelog:
                 #self._logger.info('Message: {}'.format(pformat(ev)))
@@ -341,15 +342,15 @@ class Ldpr(metaclass=ABCMeta):
         '''
         if not hasattr(self, '_types'):
             #import pdb; pdb.set_trace()
-            if hasattr(self, '_imr') and len(self.imr.graph):
-                imr = self.imr
+            if len(self.metadata.graph):
+                metadata = self.metadata
             elif getattr(self, 'provided_imr', None) and \
                     len(self.provided_imr.graph):
-                imr = self.provided_imr
+                metadata = self.provided_imr
             else:
                 return set()
 
-            self._types = set(imr.graph[self.urn : RDF.type])
+            self._types = set(metadata.graph[self.urn : RDF.type])
 
         return self._types
 
@@ -669,12 +670,12 @@ class Ldpr(metaclass=ABCMeta):
                 self.uid, incl_inbound=True, strict=False)
 
         # Remove resource itself.
-        self.rdfly.modify_dataset(self.uid, {(self.urn, None, None)}, types=None)
+        self.rdfly.modify_rsrc(self.uid, {(self.urn, None, None)}, types=None)
 
         # Remove fragments.
         for frag_urn in imr.graph[
                 : nsc['fcsystem'].fragmentOf : self.urn]:
-            self.rdfly.modify_dataset(
+            self.rdfly.modify_rsrc(
                     self.uid, {(frag_urn, None, None)}, types={})
 
         # Remove snapshots.
@@ -683,7 +684,7 @@ class Ldpr(metaclass=ABCMeta):
                 (snap_urn, None, None),
                 (None, None, snap_urn),
             }
-            self.rdfly.modify_dataset(self.uid, remove_trp, types={})
+            self.rdfly.modify_rsrc(self.uid, remove_trp, types={})
 
         # Remove inbound references.
         if inbound:
@@ -725,20 +726,19 @@ class Ldpr(metaclass=ABCMeta):
                         g.tbox.replace_term_domain(t[0], self.urn, ver_urn),
                         t[1], t[2]))
 
-        self.rdfly.modify_dataset(
+        self.rdfly.modify_rsrc(
                 self.uid, add_trp=ver_add_gr, types={nsc['fcrepo'].Version})
 
         # Add version metadata.
-        meta_add_gr = Graph()
-        meta_add_gr.add((
-            self.urn, nsc['fcrepo'].hasVersion, ver_urn))
-        meta_add_gr.add(
-                (ver_urn, nsc['fcrepo'].created, g.timestamp_term))
-        meta_add_gr.add(
+        add_gr = set()
+        add_gr.add((
+            self.urn, nsc['fcrepo'].hasVersion, ver_urn))
+        add_gr.add(
+                (ver_urn, nsc['fcrepo'].created, g.timestamp_term))
+        add_gr.add(
                 (ver_urn, nsc['fcrepo'].hasVersionLabel, Literal(ver_uid)))
 
-        self.rdfly.modify_dataset(
-                self.uid, add_trp=meta_add_gr, types={nsc['fcrepo'].Metadata})
+        self.rdfly.modify_rsrc(self.uid, add_trp=add_gr)
 
         # Update resource.
         rsrc_add_gr = Graph()
@@ -751,7 +751,7 @@ class Ldpr(metaclass=ABCMeta):
 
 
     def _modify_rsrc(self, ev_type, remove_trp=set(), add_trp=set(),
-             remove_meta=set(), add_meta=set(), notify=True):
+             notify=True):
         '''
         Low-level method to modify a graph for a single resource.
 
@@ -762,19 +762,16 @@ class Ldpr(metaclass=ABCMeta):
         @param ev_type (string) The type of event (create, update, delete).
         @param remove_trp (set) Triples to be removed.
         @param add_trp (set) Triples to be added.
-        @param remove_meta (set) Metadata triples to be removed.
-        @param add_meta (set) Metadata triples to be added.
         @param notify (boolean) Whether to send a message about the change.
         '''
-        #for trp in [remove_trp, add_trp, remove_meta, add_meta]:
+        #for trp in [remove_trp, add_trp]:
         #    if not isinstance(trp, set):
         #        trp = set(trp)
 
         type = self.types
         actor = self.metadata.value(nsc['fcrepo'].createdBy)
 
-        ret = self.rdfly.modify_dataset(self.uid, remove_trp, add_trp,
-                remove_meta, add_meta)
+        ret = self.rdfly.modify_rsrc(self.uid, remove_trp, add_trp)
 
         if notify and current_app.config.get('messaging'):
             request.changelog.append((set(remove_trp), set(add_trp), {
@@ -787,22 +784,23 @@ class Ldpr(metaclass=ABCMeta):
         return ret
 
 
-    def _ensure_single_subject_rdf(self, gr, add_fragment=True):
-        '''
-        Ensure that a RDF payload for a POST or PUT has a single resource.
-        '''
-        for s in set(gr.subjects()):
-            # Fragment components
-            if '#' in s:
-                parts = s.split('#')
-                frag = s
-                s = URIRef(parts[0])
-                if add_fragment:
-                    # @TODO This is added to the main graph. It should be added
-                    # to the metadata graph.
-                    gr.add((frag, nsc['fcsystem'].fragmentOf, s))
-            if not s == self.urn:
-                raise SingleSubjectError(s, self.uid)
+    # Not used. @TODO Deprecate or reimplement depending on requirements.
+    #def _ensure_single_subject_rdf(self, gr, add_fragment=True):
+    #    '''
+    #    Ensure that a RDF payload for a POST or PUT has a single resource.
+    #    '''
+    #    for s in set(gr.subjects()):
+    #        # Fragment components
+    #        if '#' in s:
+    #            parts = s.split('#')
+    #            frag = s
+    #            s = URIRef(parts[0])
+    #            if add_fragment:
+    #                # @TODO This is added to the main graph. It should be added
+    #                # to the metadata graph.
+    #                gr.add((frag, nsc['fcsystem'].fragmentOf, s))
+    #        if not s == self.urn:
+    #            raise SingleSubjectError(s, self.uid)
 
 
     def _check_ref_int(self, config):
@@ -983,12 +981,9 @@ class Ldpr(metaclass=ABCMeta):
         '''
         rsrc_uri = nsc['fcres'][uid]
 
-        state_trp = {
-            (rsrc_uri, nsc['fcrepo'].contains, nsc['fcres'][child_uid]),
+        add_trp = {
+            (rsrc_uri, nsc['fcsystem'].contains, nsc['fcres'][child_uid]),
             (rsrc_uri, nsc['ldp'].contains, self.urn),
-        }
-
-        meta_trp = {
             (rsrc_uri, RDF.type, nsc['ldp'].Container),
             (rsrc_uri, RDF.type, nsc['ldp'].BasicContainer),
             (rsrc_uri, RDF.type, nsc['ldp'].RDFSource),
@@ -996,13 +991,13 @@ class Ldpr(metaclass=ABCMeta):
             (rsrc_uri, nsc['fcrepo'].hasParent, nsc['fcres'][real_parent_uid]),
         }
 
-        self.rdfly.modify_dataset(
-                uid, add_trp=state_trp, add_meta=meta_trp)
+        self.rdfly.modify_rsrc(
+                uid, add_trp=add_trp)
 
         # If the path segment is just below root
         if '/' not in uid:
-            self.rdfly.modify_dataset(ROOT_UID, add_meta={
-                (ROOT_RSRC_URI, nsc['fcrepo'].contains, nsc['fcres'][uid])
+            self.rdfly.modify_rsrc(ROOT_UID, add_trp={
+                (ROOT_RSRC_URI, nsc['fcsystem'].contains, nsc['fcres'][uid])
             })
 
 
@@ -1013,38 +1008,37 @@ class Ldpr(metaclass=ABCMeta):
         @param cont_rsrc (rdflib.resource.Resource)  The container resource.
         '''
         cont_p = set(cont_rsrc.metadata.graph.predicates())
-        add_gr = Graph()
+        add_trp = set()
 
         self._logger.info('Checking direct or indirect containment.')
         self._logger.debug('Parent predicates: {}'.format(cont_p))
 
-        add_gr.add((self.urn, nsc['fcrepo'].hasParent, cont_rsrc.urn))
+        add_trp.add((self.urn, nsc['fcrepo'].hasParent, cont_rsrc.urn))
+
         if self.MBR_RSRC_URI in cont_p and self.MBR_REL_URI in cont_p:
             s = g.tbox.localize_term(
-                    cont_rsrc.imr.value(self.MBR_RSRC_URI).identifier)
-            p = cont_rsrc.imr.value(self.MBR_REL_URI).identifier
+                    cont_rsrc.metadata.value(self.MBR_RSRC_URI).identifier)
+            p = cont_rsrc.metadata.value(self.MBR_REL_URI).identifier
 
-            if cont_rsrc.imr[RDF.type : nsc['ldp'].DirectContainer]:
+            if cont_rsrc.metadata[RDF.type : nsc['ldp'].DirectContainer]:
                 self._logger.info('Parent is a direct container.')
 
                 self._logger.debug('Creating DC triples.')
-                add_gr.add((s, p, self.urn))
+                add_trp.add((s, p, self.urn))
 
-            elif cont_rsrc.imr[RDF.type : nsc['ldp'].IndirectContainer] \
+            elif cont_rsrc.metadata[RDF.type : nsc['ldp'].IndirectContainer] \
                    and self.INS_CNT_REL_URI in cont_p:
                 self._logger.info('Parent is an indirect container.')
-                cont_rel_uri = cont_rsrc.imr.value(self.INS_CNT_REL_URI).identifier
-                target_uri = self.provided_imr.value(cont_rel_uri).identifier
+                cont_rel_uri = cont_rsrc.metadata.value(
+                        self.INS_CNT_REL_URI).identifier
+                target_uri = self.provided_metadata.value(
+                        cont_rel_uri).identifier
                 self._logger.debug('Target URI: {}'.format(target_uri))
                 if target_uri:
                     self._logger.debug('Creating IC triples.')
-                    add_gr.add((s, p, target_uri))
+                    add_trp.add((s, p, target_uri))
 
-        if len(add_gr):
-            #add_gr = self._check_mgd_terms(add_gr)
-            #self._logger.debug('Adding DC/IC triples: {}'.format(
-            #    add_gr.serialize(format='turtle').decode('utf-8')))
-            self._modify_rsrc(self.RES_UPDATED, add_trp=add_gr)
+        self._modify_rsrc(self.RES_UPDATED, add_trp=add_trp)
 
 
     def _send_event_msg(self, remove_trp, add_trp, metadata):
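
For context (not part of the commit): the `atomic` decorator hunk at the top of this file's diff shows only the commit branch; a minimal sketch of the wrap/commit pattern it implies might look like the code below, where the rollback branch is an assumption:

    from functools import wraps

    def atomic(fn):
        '''
        Sketch of the transaction decorator shape implied above; the rollback
        branch is an assumption and does not appear in this diff.
        '''
        @wraps(fn)
        def wrapper(self, *args, **kwargs):
            try:
                ret = fn(self, *args, **kwargs)
            except Exception:
                self.rdfly.store.rollback()
                raise
            else:
                self.rdfly.store.commit()
                return ret
        return wrapper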

+ 138 - 38
lakesuperior/store_layouts/ldp_rs/rsrc_centric_layout.py

@@ -1,7 +1,6 @@
 import logging
 
 from copy import deepcopy
-from pprint import pformat
 from urllib.parse import quote
 
 import requests
@@ -18,27 +17,121 @@ from lakesuperior.dictionaries.namespaces import ns_mgr as nsm
 from lakesuperior.dictionaries.namespaces import ns_pfx_sparql
 from lakesuperior.exceptions import (InvalidResourceError, InvalidTripleError,
         ResourceNotExistsError, TombstoneError)
-from lakesuperior.store_layouts.ldp_rs.base_rdf_layout import BaseRdfLayout
 from lakesuperior.model.ldpr import ROOT_UID, ROOT_GRAPH_URI, ROOT_RSRC_URI
 
 
-class RsrcCentricLayout(BaseRdfLayout):
+class RsrcCentricLayout:
     '''
-    Resource-centric layout.
+    This class exposes an interface to build graph store layouts. It also
+    provides the basics of the triplestore connection.
 
-    See http://patterns.dataincubator.org/book/graph-per-resource.html
-    This implementation places each resource and its fragments within a named
-    graph. Version snapshots are also stored in individual graphs and are named
-    related in a metadata graph.
+    Some store layouts are provided. New ones aimed at specific uses
+    and optimizations of the repository may be developed by extending this
+    class and implementing all its abstract methods.
 
-    This layout is best used not with a connector that uses RDFlib but rather
-    with one that employs a direct interaction with the Graph Store Protocol,
-    either via HTTP or, ideally, using native API bindings.
+    A layout is implemented via application configuration. However, once
+    contents are ingested in a repository, changing a layout will most likely
+    require a migration.
+
+    The custom layout must be in the lakesuperior.store_layouts.rdf
+    package and the class implementing the layout must be called
+    `StoreLayout`. The module name is the one defined in the app
+    configuration.
+
+    E.g. if the configuration indicates `simple_layout` the application will
+    look for
+    `lakesuperior.store_layouts.rdf.simple_layout.SimpleLayout`.
+
+    Some method naming conventions:
+
+    - Methods starting with `get_` return a resource.
+    - Methods starting with `list_` return an iterable or generator of URIs.
+    - Methods starting with `select_` return an iterable or generator with
+      table-like data such as from a SELECT statement.
+    - Methods starting with `ask_` return a boolean value.
     '''
+
     _logger = logging.getLogger(__name__)
 
     META_GRAPH_URI = nsc['fcsystem'].meta
 
+    attr_map = {
+        nsc['fcmeta']: {
+            # List of metadata predicates. Triples bearing one of these
+            # predicates will go in the metadata graph.
+            'p': {
+                nsc['fcrepo'].created,
+                nsc['fcrepo'].createdBy,
+                nsc['fcrepo'].lastModified,
+                nsc['fcrepo'].lastModifiedBy,
+                nsc['premis'].hasMessageDigest,
+            },
+            # List of metadata RDF types. Triples bearing one of these types in
+            # the object will go in the metadata graph.
+            't': {
+                nsc['fcrepo'].Binary,
+                nsc['fcrepo'].Container,
+                nsc['fcrepo'].Pairtree,
+                nsc['ldp'].BasicContainer,
+                nsc['ldp'].Container,
+                nsc['ldp'].DirectContainer,
+                nsc['ldp'].IndirectContainer,
+                nsc['ldp'].NonRDFSource,
+                nsc['ldp'].RDFSource,
+                nsc['ldp'].Resource,
+            },
+        },
+        nsc['fcstruct']: {
+            # These are placed in a separate graph for optimization purposes.
+            'p': {
+                nsc['fcrepo'].hasParent,
+                nsc['fcsystem'].contains,
+                nsc['ldp'].contains,
+            }
+        },
+    }
+
+
+    ## MAGIC METHODS ##
+
+    def __init__(self, conn, config):
+        '''Initialize the graph store and a layout.
+
+        NOTE: `rdflib.Dataset` requires an RDF 1.1 compliant store with support
+        for the Graph Store HTTP protocol
+        (https://www.w3.org/TR/sparql11-http-rdf-update/). Blazegraph supports
+        this only in the (currently unreleased) 2.2 branch. It works with Jena,
+        which is currently the reference implementation.
+        '''
+        self.config = config
+        self._conn = conn
+        self.store = self._conn.store
+
+        #self.UNION_GRAPH_URI = self._conn.UNION_GRAPH_URI
+        self.ds = self._conn.ds
+        self.ds.namespace_manager = nsm
+
+
+    @property
+    def attr_routes(self):
+        '''
+        This is a map that allows specific triples to go to certain graphs.
+        It is a machine-friendly version of the static attribute `attr_map`
+        which is formatted for human readability and to avoid repetition.
+        The attributes not mapped here (usually user-provided triples with no
+        special meaning to the application) go to the `fcstate:` graph.
+        '''
+        if not hasattr(self, '_attr_routes'):
+            self._attr_routes = {'p': {}, 't': {}}
+            for dest in self.attr_map.keys():
+                for term_k, terms in self.attr_map[dest].items():
+                    self._attr_routes[term_k].update(
+                            {term: dest for term in terms})
+
+        return self._attr_routes
+
+
+
     def bootstrap(self):
         '''
         Delete all graphs and insert the basic triples.
@@ -74,7 +167,13 @@ class RsrcCentricLayout(BaseRdfLayout):
         # Include and/or embed children.
         embed_children_trp = embed_children_qry = ''
         if incl_srv_mgd and incl_children:
-            incl_children_qry = ''
+            incl_children_qry = '''
+            UNION {
+              GRAPH ?strg {
+                ?str_s ?str_p ?str_o .
+              }
+            }
+            '''
 
             # Embed children.
             if embed_children:
@@ -86,12 +185,13 @@ class RsrcCentricLayout(BaseRdfLayout):
                 }}
                 '''.format(embed_children_trp)
         else:
-            incl_children_qry = '\nFILTER ( ?p != ldp:contains )' \
+            incl_children_qry = ''
 
         q = '''
         CONSTRUCT {{
             ?meta_s ?meta_p ?meta_o .
             ?s ?p ?o .{inb_cnst}
+            ?str_s ?str_p ?str_o .
             {embed_chld_t}
             #?s fcrepo:writable true .
         }}
@@ -100,9 +200,10 @@ class RsrcCentricLayout(BaseRdfLayout):
             GRAPH ?mg {{
               ?meta_s ?meta_p ?meta_o .
             }}
-          }} UNION {{
+          }}{incl_chld}{embed_chld}
+          UNION {{
             GRAPH ?sg {{
-              ?s ?p ?o .{inb_qry}{incl_chld}{embed_chld}
+              ?s ?p ?o .{inb_qry}
             }}
           }}{inb_qry}
         }}
@@ -113,9 +214,9 @@ class RsrcCentricLayout(BaseRdfLayout):
                 )
 
         mg = ROOT_GRAPH_URI if uid == '' else nsc['fcmeta'][uid]
-        #import pdb; pdb.set_trace()
+        strg = ROOT_GRAPH_URI if uid == '' else nsc['fcstruct'][uid]
         try:
-            qres = self.ds.query(q, initBindings={'mg': mg,
+            qres = self.ds.query(q, initBindings={'mg': mg, 'strg': strg,
                 'sg': self._state_uri(uid, ver_uid)})
         except ResultException:
             # RDFlib bug: https://github.com/RDFLib/rdflib/issues/775
@@ -218,7 +319,7 @@ class RsrcCentricLayout(BaseRdfLayout):
         Create a new resource or replace an existing one.
         '''
         sg_uri = self._state_uri(uid)
-        mg_uri = self._meta_uri(uid)
+        mg_uri = ROOT_GRAPH_URI if uid == '' else nsc['fcmeta'][uid]
         if ver_uid:
             ver_uri = self._state_uri(uid, ver_uid)
             drop_qry = 'MOVE SILENT {sg} TO {vg};\n'.format(
@@ -235,22 +336,17 @@ class RsrcCentricLayout(BaseRdfLayout):
         mg += metadata
 
 
-    def modify_dataset(self, uid, remove_trp=set(), add_trp=set(),
-            remove_meta=set(), add_meta=set(), **kwargs):
+    def modify_rsrc(self, uid, remove_trp=set(), add_trp=set()):
         '''
         See base_rdf_layout.update_rsrc.
         '''
-        gr = self.ds.graph(self._state_uri(uid))
-        if len(remove_trp):
-            gr -= remove_trp
-        if len(add_trp):
-            gr += add_trp
+        for t in remove_trp:
+            target_gr = self.ds.graph(self._map_graph_uri(t, uid))
+            target_gr.remove(t)
 
-        meta_gr = self.ds.graph(self._meta_uri(uid))
-        if len(remove_meta):
-            gr -= remove_meta
-        if len(add_meta):
-            gr += add_meta
+        for t in add_trp:
+            target_gr = self.ds.graph(self._map_graph_uri(t, uid))
+            target_gr.add(t)
 
 
     ## PROTECTED MEMBERS ##
@@ -279,12 +375,16 @@ class RsrcCentricLayout(BaseRdfLayout):
             return nsc['fcmeta'][uid]
 
 
-    def optimize_edits(self):
-        opt_edits = [
-                l for l in self.store._edits
-                if not l.startswith('PREFIX')]
-        #opt_edits = list(ns_pfx_sparql.values()) + opt_edits
-        self.store._edits = opt_edits
-        self._logger.debug('Changes to be committed: {}'.format(
-            pformat(self.store._edits)))
+    def _map_graph_uri(self, t, uid):
+        '''
+        Map a triple to a namespace prefix corresponding to a graph.
+        '''
+        if not uid:
+            return ROOT_GRAPH_URI
 
+        if t[1] in self.attr_routes['p'].keys():
+            return self.attr_routes['p'][t[1]][uid]
+        elif t[1] == RDF.type and t[2] in self.attr_routes['t'].keys():
+            return self.attr_routes['t'][t[2]][uid]
+        else:
+            return nsc['fcstate'][uid]
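
Taken together, `attr_map`, `attr_routes` and `_map_graph_uri` above route every triple of a resource to one of three graph families. A standalone sketch of that routing (not part of the commit, with simplified namespaces and a hypothetical UID):

    from rdflib import Namespace, URIRef
    from rdflib.namespace import RDF

    fcmeta = Namespace('info:fcmeta/')
    fcstruct = Namespace('info:fcstruct/')
    fcstate = Namespace('info:fcstate/')
    fcrepo = Namespace('http://fedora.info/definitions/v4/repository#')

    # Flattened routing table in the shape attr_routes derives from attr_map:
    # predicate -> graph namespace, RDF type -> graph namespace.
    routes = {
        'p': {fcrepo.hasParent: fcstruct},
        't': {fcrepo.Container: fcmeta},
    }

    def map_graph_uri(t, uid):
        # Mirrors _map_graph_uri above: routed predicates and types go to
        # their dedicated graphs, everything else to the state graph.
        if t[1] in routes['p']:
            return routes['p'][t[1]][uid]
        if t[1] == RDF.type and t[2] in routes['t']:
            return routes['t'][t[2]][uid]
        return fcstate[uid]

    uid = 'a/b/c'  # hypothetical resource UID
    trp = (URIRef('info:fcres/a/b/c'), fcrepo.hasParent, URIRef('info:fcres/a'))
    print(map_graph_uri(trp, uid))  # -> info:fcstruct/a/b/c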

+ 11 - 0
lakesuperior/store_layouts/ldp_rs/sparql_connector.py

@@ -1,6 +1,7 @@
 import logging
 
 from abc import ABCMeta
+from pprint import pformat
 
 from rdflib import Dataset
 from rdflib.term import URIRef
@@ -45,3 +46,13 @@ class SparqlConnector(BaseConnector):
             self.readonly = True
 
         self.ds = Dataset(self.store, default_union=True)
+
+
+    def optimize_edits(self):
+        opt_edits = [
+                l for l in self.store._edits
+                if not l.startswith('PREFIX')]
+        #opt_edits = list(ns_pfx_sparql.values()) + opt_edits
+        self.store._edits = opt_edits
+        self._logger.debug('Changes to be committed: {}'.format(
+            pformat(self.store._edits)))
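
`optimize_edits` strips the PREFIX declarations that pile up in the store's pending edit buffer before the transaction is committed; the commented-out line hints that a single canonical prefix block could be prepended instead. A rough illustration with a hypothetical buffer (the statements and graph names are illustrative only):

    edits = [
        'PREFIX ldp: <http://www.w3.org/ns/ldp#>',
        'INSERT DATA { GRAPH <info:fcstate/a> { <info:fcres/a> a ldp:RDFSource } }',
        'PREFIX ldp: <http://www.w3.org/ns/ldp#>',
        'DELETE DATA { GRAPH <info:fcstate/a> { <info:fcres/a> a ldp:Container } }',
    ]
    # Same filter as optimize_edits above: drop repeated PREFIX declarations
    # before committing the batch.
    edits = [l for l in edits if not l.startswith('PREFIX')]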