Browse Source

Large overhaul mostly in support of better resource deletion and
messaging.

* Move most methods out of BaseRdfLayout into Ldpr.
* Deleting tombstone and additional testing are missing.

Stefano Cossu 7 years ago
parent
commit
fc07a7628c

+ 20 - 7
doc/notes/fcrepo4_deltas.md

@@ -24,7 +24,7 @@ results in `/rest/8c/9a/07/4e/8c9a074e-dda3-5256-ea30-eec2dd4fcf61` being
 created.
 created.
 
 
 The same request in LAKEsuperior would create
 The same request in LAKEsuperior would create
-`rest/8c9a074e-dda3-5256-ea30-eec2dd4fcf61` (obviously the identifiers will be
+`/rest/8c9a074e-dda3-5256-ea30-eec2dd4fcf61` (obviously the identifiers will be
 different).
 different).
 
 
 ## Explicit intermediate paths
 ## Explicit intermediate paths
@@ -63,7 +63,7 @@ while leaving the other server-managed triples when retrieving a resource:
 
 
     Prefer: return=representation; [include | omit]="http://fedora.info/definitions/v4/repository#Children"
     Prefer: return=representation; [include | omit]="http://fedora.info/definitions/v4/repository#Children"
 
 
-The default is `include`.
+The default behavior is including all children URIs.
 
 
 ## Automatic LDP class assignment
 ## Automatic LDP class assignment
 
 
@@ -80,7 +80,7 @@ Container?)
 
 
 FCREPO4 relies on the `/fcr:metadata` identifier to retrieve RDF metadata about
 FCREPO4 relies on the `/fcr:metadata` identifier to retrieve RDF metadata about
 an LDP-NR. LAKEsuperior supports this as a legacy option, but encourages the
 an LDP-NR. LAKEsuperior supports this as a legacy option, but encourages the
-use of content negotiation to do that. Any request to an LDP-NR with an
+use of content negotiation to do the same. Any request to an LDP-NR with an
 `Accept` header set to one of the supported RDF serialization formats will
 `Accept` header set to one of the supported RDF serialization formats will
 yield the RDF metadata of the resource instead of the binary contents.
 yield the RDF metadata of the resource instead of the binary contents.
 
 
@@ -93,10 +93,23 @@ Allowed` regardless of whether the tombstone exists or not.
 LAKEsuperior will return `405` only if the tombstone actually exists, `404`
 LAKEsuperior will return `405` only if the tombstone actually exists, `404`
 otherwise.
 otherwise.
 
 
-## Asynchronous processing
+## Atomicity
 
 
-*TODO*
+FCREPO4 supports batch atomic operations whereby a transaction can be opened
+and a number of operations (i.e. multiple R/W requests to the repository) can
+be performed. The operations are persisted in the repository only if and when
+the transaction is committed.
 
 
-The server may reply with a 202 if the `Prefer` header is set to
-`respond-async`.
+LAKEsuperior only supports atomicity for a single LDP request. I.e. a single
+HTTP request that should result in multiple write operations to the storage
+layer is only persisted if no exception is thrown. Otherwise, the operation is
+rolled back in order to prevent resources from being left in an inconsistent
+state.
+
+## Web UI
+
+FCREPO4 includes a web UI for simple CRUD operations.
+
+Such a UI is not foreseen to be built in LAKEsuperior any time soon since the
+API interaction leaves a greater degree of flexibility. In addition, the
+underlying triplestore layer may provide a UI for complex RDF queries.
 
 

+ 6 - 6
etc.skeleton/application.yml

@@ -30,14 +30,14 @@ store:
         # If set to false, properties are allowed to point to resources in the
         # If set to false, properties are allowed to point to resources in the
        # repository that do not exist. Also, if a resource is deleted, inbound
        # repository that do not exist. Also, if a resource is deleted, inbound
         # relationships may not be cleaned up.
         # relationships may not be cleaned up.
-        # This can be one of `none`, `lenient` or `strict`. `none` does not
-        # check for referential integrity. `lenient` quietly drops a
-        # user-provided triple if its # object violates referential integrity.
+        # This can be one of `False` (boolean), `lenient` or `strict`. `False`
+        # does not check for referential integrity. `lenient` quietly drops a
+        # user-provided triple if its object violates referential integrity.
         # `strict` raises an exception.
         # `strict` raises an exception.
         referential_integrity: lenient
         referential_integrity: lenient
-        webroot: http://localhost:9999/namespace/fcrepo/
-        query_ep: sparql
-        update_ep: sparql
+        webroot: http://localhost:3030/fcrepo
+        query_ep: query
+        update_ep: update
         # Optional
         # Optional
         #username: <set me>
         #username: <set me>
         #password: <set me>
         #password: <set me>

+ 33 - 0
lakesuperior/app.py

@@ -1,17 +1,28 @@
 import logging
 import logging
 import os
 import os
 
 
+from importlib import import_module
 from logging.config import dictConfig
 from logging.config import dictConfig
 
 
 from flask import Flask
 from flask import Flask
 
 
 from lakesuperior.endpoints.ldp import ldp
 from lakesuperior.endpoints.ldp import ldp
 from lakesuperior.endpoints.query import query
 from lakesuperior.endpoints.query import query
+from lakesuperior.toolbox import Toolbox
 
 
 
 
 # App factory.
 # App factory.
 
 
 def create_app(app_conf, logging_conf):
 def create_app(app_conf, logging_conf):
+    '''
+    App factory.
+
+    Create a Flask app with a given configuration and initialize persistent
+    connections.
+
+    @param app_conf (dict) Configuration parsed from `application.yml` file.
+    @param logging_conf (dict) Logging configuration from `logging.yml` file.
+    '''
     app = Flask(__name__)
     app = Flask(__name__)
     app.config.update(app_conf)
     app.config.update(app_conf)
 
 
@@ -30,6 +41,28 @@ def create_app(app_conf, logging_conf):
     })
     })
     app.register_blueprint(query, url_prefix='/query')
     app.register_blueprint(query, url_prefix='/query')
 
 
+    # Initialize RDF and file store.
+    def load_layout(type):
+        layout_cls = app_conf['store'][type]['layout']
+        store_mod = import_module('lakesuperior.store_layouts.{0}.{1}'.format(
+                type, layout_cls))
+        layout_cls = getattr(store_mod, camelcase(layout_cls))
+
+        return layout_cls(app_conf['store'][type])
+
+    app.rdfly = load_layout('ldp_rs')
+    app.nonrdfly = load_layout('ldp_nr')
+
     return app
     return app
 
 
 
 
+def camelcase(word):
+    '''
+    Convert a string with underscores with a camel-cased one.
+
+    Ripped from https://stackoverflow.com/a/6425628
+    '''
+    return ''.join(x.capitalize() or '_' for x in word.split('_'))
+
+
+

+ 13 - 8
lakesuperior/endpoints/ldp.py

@@ -3,13 +3,14 @@ import logging
 from collections import defaultdict
 from collections import defaultdict
 from uuid import uuid4
 from uuid import uuid4
 
 
-from flask import Blueprint, g, request, send_file, url_for
+from flask import Blueprint, current_app, g, request, send_file, url_for
 from rdflib import Graph
 from rdflib import Graph
 from werkzeug.datastructures import FileStorage
 from werkzeug.datastructures import FileStorage
 
 
-from lakesuperior.exceptions import InvalidResourceError, \
-        ResourceExistsError, ResourceNotExistsError, ServerManagedTermError, \
-        TombstoneError
+from lakesuperior.exceptions import (
+    InvalidResourceError, ResourceExistsError, ResourceNotExistsError,
+    ServerManagedTermError, TombstoneError
+)
 from lakesuperior.model.ldpr import Ldpr
 from lakesuperior.model.ldpr import Ldpr
 from lakesuperior.model.ldp_nr import LdpNr
 from lakesuperior.model.ldp_nr import LdpNr
 from lakesuperior.model.ldp_rs import Ldpc, LdpDc, LdpIc, LdpRs
 from lakesuperior.model.ldp_rs import Ldpc, LdpDc, LdpIc, LdpRs
@@ -92,7 +93,7 @@ def get_resource(uuid, force_rdf=False):
             repr_options = prefer['return']
             repr_options = prefer['return']
 
 
     try:
     try:
-        rsrc = Ldpr.readonly_inst(uuid, repr_options)
+        rsrc = Ldpr.inst(uuid, repr_options)
     except ResourceNotExistsError as e:
     except ResourceNotExistsError as e:
         return str(e), 404
         return str(e), 404
     except TombstoneError as e:
     except TombstoneError as e:
@@ -240,10 +241,14 @@ def delete_resource(uuid):
     Delete a resource.
     Delete a resource.
     '''
     '''
     headers = std_headers
     headers = std_headers
-    rsrc = Ldpc(uuid)
 
 
+    # If referential integrity is enforced, grab all inbound relationships
+    # to break them.
+    repr_opts = {'parameters' : {'include' : Ldpr.RETURN_INBOUND_REF_URI}} \
+            if current_app.config['store']['ldp_rs']['referential_integrity'] \
+            else None
     try:
     try:
-        rsrc.delete()
+        Ldpr.inst(uuid, repr_opts).delete()
     except ResourceNotExistsError as e:
     except ResourceNotExistsError as e:
         return str(e), 404
         return str(e), 404
     except TombstoneError as e:
     except TombstoneError as e:
@@ -261,7 +266,7 @@ def tombstone(uuid):
     The only allowed method is DELETE; any other verb will return a 405.
     The only allowed method is DELETE; any other verb will return a 405.
     '''
     '''
     logger.debug('Deleting tombstone for {}.'.format(uuid))
     logger.debug('Deleting tombstone for {}.'.format(uuid))
-    rsrc = Ldpr(uuid, {'value' : 'minimal'})
+    rsrc = Ldpr(uuid, repr_opts={'value' : 'minimal'})
     try:
     try:
         imr = rsrc.imr
         imr = rsrc.imr
     except TombstoneError as e:
     except TombstoneError as e:

+ 9 - 19
lakesuperior/model/ldp_nr.py

@@ -20,18 +20,6 @@ class LdpNr(Ldpr):
         nsc['ldp'].NonRDFSource,
         nsc['ldp'].NonRDFSource,
     }
     }
 
 
-
-    @property
-    def nonrdfly(self):
-        '''
-        Load non-RDF (binary) store layout.
-        '''
-        if not hasattr(self, '_nonrdfly'):
-            self._nonrdfly = __class__.load_layout('non_rdf')
-
-        return self._nonrdfly
-
-
     @property
     @property
     def filename(self):
     def filename(self):
         return self.imr.value(nsc['ebucore'].filename)
         return self.imr.value(nsc['ebucore'].filename)
@@ -61,14 +49,16 @@ class LdpNr(Ldpr):
         file_uuid = self.nonrdfly.persist(stream)
         file_uuid = self.nonrdfly.persist(stream)
 
 
         # Gather RDF metadata.
         # Gather RDF metadata.
+        self.provided_imr = Resource(Graph(), self.urn)
+        for t in self.base_types:
+            self.provided_imr.add(RDF.type, t)
         self._add_metadata(stream, digest=file_uuid, mimetype=mimetype,
         self._add_metadata(stream, digest=file_uuid, mimetype=mimetype,
                 disposition=disposition)
                 disposition=disposition)
 
 
         # Try to persist metadata. If it fails, delete the file.
         # Try to persist metadata. If it fails, delete the file.
-        self._logger.debug('Persisting LDP-NR triples in {}'.format(
-            self.urn))
+        self._logger.debug('Persisting LDP-NR triples in {}'.format(self.urn))
         try:
         try:
-            rsrc = self._create_rsrc(self.imr)
+            rsrc = self._create_rsrc()
         except:
         except:
             self.nonrdfly.delete(file_uuid)
             self.nonrdfly.delete(file_uuid)
         else:
         else:
@@ -93,19 +83,19 @@ class LdpNr(Ldpr):
         '''
         '''
         # File size.
         # File size.
         self._logger.debug('Data stream size: {}'.format(stream.limit))
         self._logger.debug('Data stream size: {}'.format(stream.limit))
-        self.stored_or_new_imr.set(nsc['premis'].hasSize, Literal(stream.limit))
+        self.provided_imr.set(nsc['premis'].hasSize, Literal(stream.limit))
 
 
         # Checksum.
         # Checksum.
         cksum_term = URIRef('urn:sha1:{}'.format(digest))
         cksum_term = URIRef('urn:sha1:{}'.format(digest))
-        self.imr.set(nsc['premis'].hasMessageDigest, cksum_term)
+        self.provided_imr.set(nsc['premis'].hasMessageDigest, cksum_term)
 
 
         # MIME type.
         # MIME type.
-        self.imr.set(nsc['ebucore']['hasMimeType'], Literal(mimetype))
+        self.provided_imr.set(nsc['ebucore']['hasMimeType'], Literal(mimetype))
 
 
         # File name.
         # File name.
         self._logger.debug('Disposition: {}'.format(disposition))
         self._logger.debug('Disposition: {}'.format(disposition))
         try:
         try:
-            self.imr.set(nsc['ebucore']['filename'], Literal(
+            self.provided_imr.set(nsc['ebucore']['filename'], Literal(
                     disposition['attachment']['parameters']['filename']))
                     disposition['attachment']['parameters']['filename']))
         except KeyError:
         except KeyError:
             pass
             pass

+ 17 - 17
lakesuperior/model/ldp_rs.py

@@ -48,7 +48,7 @@ class LdpRs(Ldpr):
 
 
         Perform a POST action after a valid resource URI has been found.
         Perform a POST action after a valid resource URI has been found.
         '''
         '''
-        return self._create_or_update_rsrc(data, format, handling,
+        return self._create_or_replace_rsrc(data, format, handling,
                 create_only=True)
                 create_only=True)
 
 
 
 
@@ -57,7 +57,7 @@ class LdpRs(Ldpr):
         '''
         '''
         https://www.w3.org/TR/ldp/#ldpr-HTTP_PUT
         https://www.w3.org/TR/ldp/#ldpr-HTTP_PUT
         '''
         '''
-        return self._create_or_update_rsrc(data, format, handling)
+        return self._create_or_replace_rsrc(data, format, handling)
 
 
 
 
     @transactional
     @transactional
@@ -77,7 +77,7 @@ class LdpRs(Ldpr):
 
 
     ## PROTECTED METHODS ##
     ## PROTECTED METHODS ##
 
 
-    def _create_or_update_rsrc(self, data, format, handling,
+    def _create_or_replace_rsrc(self, data, format, handling,
             create_only=False):
             create_only=False):
         '''
         '''
         Create or update a resource. PUT and POST methods, which are almost
         Create or update a resource. PUT and POST methods, which are almost
@@ -102,7 +102,7 @@ class LdpRs(Ldpr):
         create = create_only or not self.is_stored
         create = create_only or not self.is_stored
         self._add_srv_mgd_triples(create)
         self._add_srv_mgd_triples(create)
         self._ensure_single_subject_rdf(self.provided_imr.graph)
         self._ensure_single_subject_rdf(self.provided_imr.graph)
-        ref_int = self.rdfly.conf['referential_integrity']
+        ref_int = self.rdfly.config['referential_integrity']
         if ref_int:
         if ref_int:
             self._check_ref_int(ref_int)
             self._check_ref_int(ref_int)
 
 
@@ -203,28 +203,28 @@ class LdpRs(Ldpr):
         modified. If a server-managed term is present in the query but does not
         modified. If a server-managed term is present in the query but does not
         cause any change in the updated resource, no error is raised.
         cause any change in the updated resource, no error is raised.
 
 
-        @return tuple Remove and add triples. These can be used with
-        `BaseStoreLayout.update_resource` and/or recorded as separate events in
-        a provenance tracking system.
+        @return tuple(rdflib.Graph) Remove and add graphs. These can be used
+        with `BaseStoreLayout.update_resource` and/or recorded as separate
+        events in a provenance tracking system.
         '''
         '''
-
         pre_g = self.imr.graph
         pre_g = self.imr.graph
 
 
         post_g = deepcopy(pre_g)
         post_g = deepcopy(pre_g)
         post_g.update(q)
         post_g.update(q)
 
 
-        remove = pre_g - post_g
-        add = post_g - pre_g
+        #remove = pre_g - post_g
+        #add = post_g - pre_g
+        remove_g, add_g = self._dedup_deltas(pre_g, post_g)
 
 
-        self._logger.info('Removing: {}'.format(
-            remove.serialize(format='turtle').decode('utf8')))
-        self._logger.info('Adding: {}'.format(
-            add.serialize(format='turtle').decode('utf8')))
+        #self._logger.info('Removing: {}'.format(
+        #    remove_g.serialize(format='turtle').decode('utf8')))
+        #self._logger.info('Adding: {}'.format(
+        #    add_g.serialize(format='turtle').decode('utf8')))
 
 
-        remove = self._check_mgd_terms(remove, handling)
-        add = self._check_mgd_terms(add, handling)
+        remove_g = self._check_mgd_terms(remove_g, handling)
+        add_g = self._check_mgd_terms(add_g, handling)
 
 
-        return remove, add
+        return remove_g, add_g
 
 
 
 
     def _ensure_single_subject_rdf(self, g):
     def _ensure_single_subject_rdf(self, g):

+ 206 - 178
lakesuperior/model/ldpr.py

@@ -2,7 +2,6 @@ import logging
 
 
 from abc import ABCMeta
 from abc import ABCMeta
 from collections import defaultdict
 from collections import defaultdict
-from importlib import import_module
 from itertools import accumulate
 from itertools import accumulate
 from uuid import uuid4
 from uuid import uuid4
 
 
@@ -12,13 +11,14 @@ from flask import current_app
 from rdflib import Graph
 from rdflib import Graph
 from rdflib.resource import Resource
 from rdflib.resource import Resource
 from rdflib.namespace import RDF, XSD
 from rdflib.namespace import RDF, XSD
+from rdflib.term import URIRef, Literal
 
 
 from lakesuperior.dictionaries.namespaces import ns_collection as nsc
 from lakesuperior.dictionaries.namespaces import ns_collection as nsc
 from lakesuperior.dictionaries.srv_mgd_terms import  srv_mgd_subjects, \
 from lakesuperior.dictionaries.srv_mgd_terms import  srv_mgd_subjects, \
         srv_mgd_predicates, srv_mgd_types
         srv_mgd_predicates, srv_mgd_types
 from lakesuperior.exceptions import InvalidResourceError, \
 from lakesuperior.exceptions import InvalidResourceError, \
         ResourceNotExistsError, ServerManagedTermError
         ResourceNotExistsError, ServerManagedTermError
-from lakesuperior.store_layouts.rdf.base_rdf_layout import BaseRdfLayout
+from lakesuperior.store_layouts.ldp_rs.base_rdf_layout import BaseRdfLayout
 from lakesuperior.toolbox import Toolbox
 from lakesuperior.toolbox import Toolbox
 
 
 
 
@@ -119,6 +119,135 @@ class Ldpr(metaclass=ABCMeta):
     _logger = logging.getLogger(__name__)
     _logger = logging.getLogger(__name__)
 
 
 
 
+    ## STATIC & CLASS METHODS ##
+
+    @classmethod
+    def inst(cls, uuid, repr_opts=None):
+        '''
+        Factory method that creates and returns an instance of an LDPR subclass
+        based on information that needs to be queried from the underlying
+        graph store.
+
+        N.B. The resource must exist.
+
+        @param uuid UUID of the instance.
+        '''
+        imr_urn = nsc['fcres'][uuid] if uuid else cls.ROOT_NODE_URN
+        cls._logger.debug('Representation options: {}'.format(repr_opts))
+        imr_opts = cls.set_imr_options(repr_opts)
+        imr = current_app.rdfly.extract_imr(imr_urn, **imr_opts)
+        rdf_types = set(imr.objects(RDF.type))
+
+        for t in rdf_types:
+            cls._logger.debug('Checking RDF type: {}'.format(t.identifier))
+            if t.identifier == cls.LDP_NR_TYPE:
+                from lakesuperior.model.ldp_nr import LdpNr
+                cls._logger.info('Resource is a LDP-NR.')
+                return LdpNr(uuid, repr_opts)
+            if t.identifier == cls.LDP_RS_TYPE:
+                from lakesuperior.model.ldp_rs import LdpRs
+                cls._logger.info('Resource is a LDP-RS.')
+                return LdpRs(uuid, repr_opts)
+
+        raise ResourceNotExistsError(uuid)
+
+
+    @classmethod
+    def inst_for_post(cls, parent_uuid=None, slug=None):
+        '''
+        Validate conditions to perform a POST and return an LDP resource
+        instancefor using with the `post` method.
+
+        This may raise an exception resulting in a 404 if the parent is not
+        found or a 409 if the parent is not a valid container.
+        '''
+        # Shortcut!
+        if not slug and not parent_uuid:
+            return cls(str(uuid4()))
+
+        parent = cls(parent_uuid, repr_opts={
+            'parameters' : {'omit' : cls.RETURN_CHILD_RES_URI}
+        })
+
+        # Set prefix.
+        if parent_uuid:
+            parent_types = { t.identifier for t in \
+                    parent.imr.objects(RDF.type) }
+            cls._logger.debug('Parent types: {}'.format(
+                    parent_types))
+            if nsc['ldp'].Container not in parent_types:
+                raise InvalidResourceError('Parent {} is not a container.'
+                       .format(parent_uuid))
+
+            pfx = parent_uuid + '/'
+        else:
+            pfx = ''
+
+        # Create candidate UUID and validate.
+        if slug:
+            cnd_uuid = pfx + slug
+            cnd_rsrc = Resource(current_app.rdfly.ds, nsc['fcres'][cnd_uuid])
+            if current_app.rdfly.ask_rsrc_exists(cnd_rsrc.identifier):
+                return cls(pfx + str(uuid4()))
+            else:
+                return cls(cnd_uuid)
+        else:
+            return cls(pfx + str(uuid4()))
+
+
+    @classmethod
+    def set_imr_options(cls, repr_opts):
+        '''
+        Set options to retrieve IMR.
+
+        Ideally, IMR retrieval is done once per request, so all the options
+        are set once in the `imr()` property.
+
+        @param repr_opts (dict): Options parsed from `Prefer` header.
+        '''
+        cls._logger.debug('Setting retrieval options from: {}'.format(repr_opts))
+        imr_options = {}
+
+        if repr_opts.setdefault('value') == 'minimal':
+            imr_options = {
+                'embed_children' : False,
+                'incl_children' : False,
+                'incl_inbound' : False,
+                'incl_srv_mgd' : False,
+            }
+        else:
+            # Default.
+            imr_options = {
+                'embed_children' : False,
+                'incl_children' : True,
+                'incl_inbound' : False,
+                'incl_srv_mgd' : True,
+            }
+
+            # Override defaults.
+            if 'parameters' in repr_opts:
+                include = repr_opts['parameters']['include'].split(' ') \
+                        if 'include' in repr_opts['parameters'] else []
+                omit = repr_opts['parameters']['omit'].split(' ') \
+                        if 'omit' in repr_opts['parameters'] else []
+
+                cls._logger.debug('Include: {}'.format(include))
+                cls._logger.debug('Omit: {}'.format(omit))
+
+                if str(cls.EMBED_CHILD_RES_URI) in include:
+                        imr_options['embed_children'] = True
+                if str(cls.RETURN_CHILD_RES_URI) in omit:
+                        imr_options['incl_children'] = False
+                if str(cls.RETURN_INBOUND_REF_URI) in include:
+                        imr_options['incl_inbound'] = True
+                if str(cls.RETURN_SRV_MGD_RES_URI) in omit:
+                        imr_options['incl_srv_mgd'] = False
+
+        cls._logger.debug('Retrieval options: {}'.format(imr_options))
+
+        return imr_options
+
+
     ## MAGIC METHODS ##
     ## MAGIC METHODS ##
 
 
     def __init__(self, uuid, repr_opts={}):
     def __init__(self, uuid, repr_opts={}):
@@ -132,27 +261,15 @@ class Ldpr(metaclass=ABCMeta):
         @param uuid (string) UUID of the resource. If None (must be explicitly
         @param uuid (string) UUID of the resource. If None (must be explicitly
         set) it refers to the root node.
         set) it refers to the root node.
         '''
         '''
-        self.rdf_store_layout = current_app.config['store']['ldp_rs']['layout']
-        self.non_rdf_store_layout = \
-                current_app.config['store']['ldp_nr']['layout']
-
         self.uuid = uuid
         self.uuid = uuid
-        self.urn = nsc['fcres'][uuid] if self.uuid is not None \
-                else self.ROOT_NODE_URN
+        self.urn = nsc['fcres'][uuid] if self.uuid else self.ROOT_NODE_URN
         self.uri = Toolbox().uuid_to_uri(self.uuid)
         self.uri = Toolbox().uuid_to_uri(self.uuid)
 
 
-        self._imr_options = __class__.set_imr_options(repr_opts)
+        self.repr_opts = repr_opts
+        self._imr_options = __class__.set_imr_options(self.repr_opts)
 
 
-
-    @property
-    def rdfly(self):
-        '''
-        Load RDF store layout.
-        '''
-        if not hasattr(self, '_rdfly'):
-            self._rdfly = __class__.load_layout('rdf')
-
-        return self._rdfly
+        self.rdfly = current_app.rdfly
+        self.nonrdfly = current_app.nonrdfly
 
 
 
 
     @property
     @property
@@ -255,7 +372,7 @@ class Ldpr(metaclass=ABCMeta):
         @return set(rdflib.term.URIRef)
         @return set(rdflib.term.URIRef)
         '''
         '''
         if not hasattr(self, '_types'):
         if not hasattr(self, '_types'):
-            self._types = set(self.imr[RDF.type])
+            self._types = self.imr.graph[self.imr.identifier : RDF.type]
 
 
         return self._types
         return self._types
 
 
@@ -267,162 +384,11 @@ class Ldpr(metaclass=ABCMeta):
         @return set(rdflib.term.URIRef)
         @return set(rdflib.term.URIRef)
         '''
         '''
         if not hasattr(self, '_ldp_types'):
         if not hasattr(self, '_ldp_types'):
-            self._ldp_types = set()
-            for t in self.types:
-                if t.qname()[:4] == 'ldp:':
-                    self._ldp_types.add(t)
+            self._ldp_types = { t for t in self.types if t[:4] == 'ldp:' }
 
 
         return self._ldp_types
         return self._ldp_types
 
 
 
 
-    ## STATIC & CLASS METHODS ##
-
-    @classmethod
-    def load_layout(cls, type):
-        '''
-        Dynamically load the store layout indicated in the configuration.
-
-        @param type (string) One of `rdf` or `non_rdf`. Determines the type of
-        layout to be loaded.
-        '''
-        layout_cls = getattr(cls(None), '{}_store_layout'.format(type))
-        store_mod = import_module('lakesuperior.store_layouts.{0}.{1}'.format(
-                type, layout_cls))
-        layout_cls = getattr(store_mod, Toolbox().camelcase(layout_cls))
-
-        return layout_cls()
-
-
-    @classmethod
-    def readonly_inst(cls, uuid, repr_opts=None):
-        '''
-        Factory method that creates and returns an instance of an LDPR subclass
-        based on information that needs to be queried from the underlying
-        graph store.
-
-        This is used with retrieval methods for resources that already exist.
-
-        @param uuid UUID of the instance.
-        '''
-        rdfly = cls.load_layout('rdf')
-        imr_urn = nsc['fcres'][uuid] if uuid else cls.ROOT_NODE_URN
-        cls._logger.debug('Representation options: {}'.format(repr_opts))
-        imr_opts = cls.set_imr_options(repr_opts)
-        imr = rdfly.extract_imr(imr_urn, **imr_opts)
-        rdf_types = imr.objects(RDF.type)
-
-        for t in rdf_types:
-            cls._logger.debug('Checking RDF type: {}'.format(t.identifier))
-            if t.identifier == cls.LDP_NR_TYPE:
-                from lakesuperior.model.ldp_nr import LdpNr
-                cls._logger.info('Resource is a LDP-NR.')
-                return LdpNr(uuid, repr_opts)
-            if t.identifier == cls.LDP_RS_TYPE:
-                from lakesuperior.model.ldp_rs import LdpRs
-                cls._logger.info('Resource is a LDP-RS.')
-                return LdpRs(uuid, repr_opts)
-
-        raise ResourceNotExistsError(uuid)
-
-
-    @classmethod
-    def inst_for_post(cls, parent_uuid=None, slug=None):
-        '''
-        Validate conditions to perform a POST and return an LDP resource
-        instancefor using with the `post` method.
-
-        This may raise an exception resulting in a 404 if the parent is not
-        found or a 409 if the parent is not a valid container.
-        '''
-        # Shortcut!
-        if not slug and not parent_uuid:
-            return cls(str(uuid4()))
-
-        rdfly = cls.load_layout('rdf')
-
-        parent = cls(parent_uuid, repr_opts={
-            'parameters' : {'omit' : cls.RETURN_CHILD_RES_URI}
-        })
-
-        # Set prefix.
-        if parent_uuid:
-            parent_types = { t.identifier for t in \
-                    parent.imr.objects(RDF.type) }
-            cls._logger.debug('Parent types: {}'.format(
-                    parent_types))
-            if nsc['ldp'].Container not in parent_types:
-                raise InvalidResourceError('Parent {} is not a container.'
-                       .format(parent_uuid))
-
-            pfx = parent_uuid + '/'
-        else:
-            pfx = ''
-
-        # Create candidate UUID and validate.
-        if slug:
-            cnd_uuid = pfx + slug
-            cnd_rsrc = Resource(rdfly.ds, nsc['fcres'][cnd_uuid])
-            if rdfly.ask_rsrc_exists(cnd_rsrc.identifier):
-                return cls(pfx + str(uuid4()))
-            else:
-                return cls(cnd_uuid)
-        else:
-            return cls(pfx + str(uuid4()))
-
-
-    @classmethod
-    def set_imr_options(cls, repr_opts):
-        '''
-        Set options to retrieve IMR.
-
-        Ideally, IMR retrieval is done once per request, so all the options
-        are set once in the `imr()` property.
-
-        @param repr_opts (dict): Options parsed from `Prefer` header.
-        '''
-        cls._logger.debug('Setting retrieval options from: {}'.format(repr_opts))
-        imr_options = {}
-
-        if 'value' in repr_opts and repr_opts['value'] == 'minimal':
-            imr_options = {
-                'embed_children' : False,
-                'incl_children' : False,
-                'incl_inbound' : False,
-                'incl_srv_mgd' : False,
-            }
-        else:
-            # Default.
-            imr_options = {
-                'embed_children' : False,
-                'incl_children' : True,
-                'incl_inbound' : False,
-                'incl_srv_mgd' : True,
-            }
-
-            # Override defaults.
-            if 'parameters' in repr_opts:
-                include = repr_opts['parameters']['include'].split(' ') \
-                        if 'include' in repr_opts['parameters'] else []
-                omit = repr_opts['parameters']['omit'].split(' ') \
-                        if 'omit' in repr_opts['parameters'] else []
-
-                cls._logger.debug('Include: {}'.format(include))
-                cls._logger.debug('Omit: {}'.format(omit))
-
-                if str(cls.EMBED_CHILD_RES_URI) in include:
-                        imr_options['embed_children'] = True
-                if str(cls.RETURN_CHILD_RES_URI) in omit:
-                        imr_options['incl_children'] = False
-                if str(cls.RETURN_INBOUND_REF_URI) in include:
-                        imr_options['incl_inbound'] = True
-                if str(cls.RETURN_SRV_MGD_RES_URI) in omit:
-                        imr_options['incl_srv_mgd'] = False
-
-        cls._logger.debug('Retrieval options: {}'.format(imr_options))
-
-        return imr_options
-
-
     ## LDP METHODS ##
     ## LDP METHODS ##
 
 
     def head(self):
     def head(self):
@@ -444,7 +410,7 @@ class Ldpr(metaclass=ABCMeta):
 
 
         for t in self.ldp_types:
         for t in self.ldp_types:
             out_headers['Link'].append(
             out_headers['Link'].append(
-                    '{};rel="type"'.format(t.identifier.n3()))
+                    '{};rel="type"'.format(t.n3()))
 
 
         return out_headers
         return out_headers
 
 
@@ -467,11 +433,30 @@ class Ldpr(metaclass=ABCMeta):
 
 
     @transactional
     @transactional
     @must_exist
     @must_exist
-    def delete(self):
+    def delete(self, inbound=True, delete_children=True):
         '''
         '''
         https://www.w3.org/TR/ldp/#ldpr-HTTP_DELETE
         https://www.w3.org/TR/ldp/#ldpr-HTTP_DELETE
+
+        @param inbound (boolean) If specified, delete all inbound relationships
+        as well. This is the default and is always the case if referential
+        integrity is enforced by configuration.
+        @param delete_children (boolean) Whether to delete all child resources.
+        This is the default.
         '''
         '''
-        return self.rdfly.delete_rsrc(self.urn)
+        refint = current_app.config['store']['ldp_rs']['referential_integrity']
+        inbound = True if refint else inbound
+
+        children = self.imr[nsc['ldp'].contains * '+'] \
+                if delete_children else []
+
+        ret = self._delete_rsrc(inbound)
+
+        for child_uri in children:
+            child_rsrc = Ldpr.inst(
+                Toolbox().uri_to_uuid(child_uri.identifier), self.repr_opts)
+            child_rsrc._delete_rsrc(inbound, tstone_pointer=self.uri)
+
+        return ret
 
 
 
 
     @transactional
     @transactional
@@ -489,7 +474,7 @@ class Ldpr(metaclass=ABCMeta):
         Create a new resource by comparing an empty graph with the provided
         Create a new resource by comparing an empty graph with the provided
         IMR graph.
         IMR graph.
         '''
         '''
-        self.rdfly.modify_dataset(Graph(), self.provided_imr.graph)
+        self.rdfly.modify_dataset(add_trp=self.provided_imr.graph)
 
 
         return self.RES_CREATED
         return self.RES_CREATED
 
 
@@ -505,12 +490,44 @@ class Ldpr(metaclass=ABCMeta):
         for p in self.protected_pred:
         for p in self.protected_pred:
             self.imr.remove(p)
             self.imr.remove(p)
 
 
-        self.rdfly.modify_dataset(self.imr.graph, self.provided_imr.graph)
+        delta = self._dedup_deltas(self.imr.graph, self.provided_imr.graph)
+        self.rdfly.modify_dataset(*delta)
 
 
         # Reset the IMR because it has changed.
         # Reset the IMR because it has changed.
         delattr(self, 'imr')
         delattr(self, 'imr')
 
 
-        return self.RES_CREATED
+        return self.RES_UPDATED
+
+
+    def _delete_rsrc(self, inbound, tstone_pointer=None):
+        '''
+        Delete a single resource and create a tombstone.
+
+        @param inbound (boolean) Whether to delete the inbound relationships.
+        @param tstone_pointer (URIRef) If set to a URI, this creates a pointer
+        to the tombstone of the resource that used to contain the deleted
+        resource. Otherwise the deleted resource becomes a tombstone.
+        '''
+        self._logger.info('Removing resource {}'.format(self.urn))
+
+        remove_trp = set(self.imr.graph)
+        add_trp = set()
+
+        if tstone_pointer:
+            add_trp.add((self.urn, nsc['fcsystem'].tombstone, tstone_pointer))
+        else:
+            ts = Literal(arrow.utcnow(), datatype=XSD.dateTime)
+            add_trp.add((self.urn, RDF.type, nsc['fcsystem'].Tombstone))
+            add_trp.add((self.urn, nsc['fcrepo'].created, ts))
+
+
+        if inbound:
+            for ib_rsrc_uri in self.imr.graph.subjects(None, self.urn):
+                remove_trp.add((ib_rsrc_uri, None, self.urn))
+
+        self.rdfly.modify_dataset(remove_trp, add_trp)
+
+        return self.RES_DELETED
 
 
 
 
     def _set_containment_rel(self):
     def _set_containment_rel(self):
@@ -573,6 +590,17 @@ class Ldpr(metaclass=ABCMeta):
         return None
         return None
 
 
 
 
+    def _dedup_deltas(self, remove_g, add_g):
+        '''
+        Remove duplicate triples from add and remove delta graphs, which would
+        otherwise contain unnecessary statements that annul each other.
+        '''
+        return (
+            remove_g - add_g,
+            add_g - remove_g
+        )
+
+
     def _create_path_segment(self, uri, child_uri):
     def _create_path_segment(self, uri, child_uri):
         '''
         '''
         Create a path segment with a non-LDP containment statement.
         Create a path segment with a non-LDP containment statement.

+ 3 - 3
lakesuperior/store_layouts/non_rdf/base_non_rdf_layout.py → lakesuperior/store_layouts/ldp_nr/base_non_rdf_layout.py

@@ -17,12 +17,12 @@ class BaseNonRdfLayout(metaclass=ABCMeta):
     _logger = logging.getLogger(__name__)
     _logger = logging.getLogger(__name__)
 
 
 
 
-    def __init__(self):
+    def __init__(self, config):
         '''
         '''
         Initialize the base non-RDF store layout.
         Initialize the base non-RDF store layout.
         '''
         '''
-        self.conf = current_app.config['store']['ldp_nr']
-        self.root = self.conf['path']
+        self.config = config
+        self.root = config['path']
 
 
 
 
     ## INTERFACE METHODS ##
     ## INTERFACE METHODS ##

+ 3 - 3
lakesuperior/store_layouts/non_rdf/default_layout.py → lakesuperior/store_layouts/ldp_nr/default_layout.py

@@ -3,7 +3,7 @@ import os
 from hashlib import sha1
 from hashlib import sha1
 from uuid import uuid4
 from uuid import uuid4
 
 
-from lakesuperior.store_layouts.non_rdf.base_non_rdf_layout import \
+from lakesuperior.store_layouts.ldp_nr.base_non_rdf_layout import \
         BaseNonRdfLayout
         BaseNonRdfLayout
 
 
 class DefaultLayout(BaseNonRdfLayout):
 class DefaultLayout(BaseNonRdfLayout):
@@ -79,8 +79,8 @@ class DefaultLayout(BaseNonRdfLayout):
         checksum.
         checksum.
         '''
         '''
         self._logger.debug('Generating path from uuid: {}'.format(uuid))
         self._logger.debug('Generating path from uuid: {}'.format(uuid))
-        bl = self.conf['pairtree_branch_length']
-        bc = self.conf['pairtree_branches']
+        bl = self.config['pairtree_branch_length']
+        bc = self.config['pairtree_branches']
         term = len(uuid) if bc==0 else min(bc*bl, len(uuid))
         term = len(uuid) if bc==0 else min(bc*bl, len(uuid))
 
 
         path = [ uuid[i:i+bl] for i in range(0, term, bl) ]
         path = [ uuid[i:i+bl] for i in range(0, term, bl) ]

+ 44 - 114
lakesuperior/store_layouts/rdf/base_rdf_layout.py → lakesuperior/store_layouts/ldp_rs/base_rdf_layout.py

@@ -12,7 +12,7 @@ from lakesuperior.dictionaries.namespaces import ns_collection as nsc
 from lakesuperior.dictionaries.namespaces import ns_mgr as nsm
 from lakesuperior.dictionaries.namespaces import ns_mgr as nsm
 from lakesuperior.exceptions import ResourceNotExistsError
 from lakesuperior.exceptions import ResourceNotExistsError
 from lakesuperior.messaging.messenger import Messenger
 from lakesuperior.messaging.messenger import Messenger
-from lakesuperior.store_layouts.rdf.graph_store_connector import \
+from lakesuperior.store_layouts.ldp_rs.graph_store_connector import \
         GraphStoreConnector
         GraphStoreConnector
 from lakesuperior.toolbox import Toolbox
 from lakesuperior.toolbox import Toolbox
 
 
@@ -57,7 +57,7 @@ class BaseRdfLayout(metaclass=ABCMeta):
 
 
     ## MAGIC METHODS ##
     ## MAGIC METHODS ##
 
 
-    def __init__(self):
+    def __init__(self, config):
         '''Initialize the graph store and a layout.
         '''Initialize the graph store and a layout.
 
 
         NOTE: `rdflib.Dataset` requires a RDF 1.1 compliant store with support
         NOTE: `rdflib.Dataset` requires a RDF 1.1 compliant store with support
@@ -66,12 +66,10 @@ class BaseRdfLayout(metaclass=ABCMeta):
         this only in the (currently unreleased) 2.2 branch. It works with Jena,
         this only in the (currently unreleased) 2.2 branch. It works with Jena,
         which is currently the reference implementation.
         which is currently the reference implementation.
         '''
         '''
-        self.conf = current_app.config['store']['ldp_rs']
+        self.config = config
         self._conn = GraphStoreConnector(
         self._conn = GraphStoreConnector(
-                query_ep=self.conf['webroot'] + self.conf['query_ep'],
-                update_ep=self.conf['webroot'] + self.conf['update_ep'])
-
-        self._msg = Messenger(current_app.config['messaging'])
+                query_ep=config['webroot'] + config['query_ep'],
+                update_ep=config['webroot'] + config['update_ep'])
 
 
 
 
     @property
     @property
@@ -93,55 +91,44 @@ class BaseRdfLayout(metaclass=ABCMeta):
 
 
     ## PUBLIC METHODS ##
     ## PUBLIC METHODS ##
 
 
-    def create_or_replace_rsrc(self, imr):
-        '''Create a resource graph in the main graph if it does not exist.
+    #def create_or_replace_rsrc(self, imr):
+    #    '''Create a resource graph in the main graph if it does not exist.
 
 
-        If it exists, replace the existing one retaining the creation date.
-        '''
-        if self.ask_rsrc_exists(imr.identifier):
-            self._logger.info(
-                    'Resource {} exists. Removing all outbound triples.'
-                    .format(imr.identifier))
-            ev_type = self.replace_rsrc(imr)
-        else:
-            ev_type = self.create_rsrc(imr)
-
-        #self._msg.send(
-        #    imr.identifier,
-        #    ev_type,
-        #    time=imr.value(nsc['fcrepo'].lastModified),
-        #    type=list(imr.graph.objects(imr.identifier, RDF.type)),
-        #    data=imr.graph,
-        #    metadata={
-        #        'actor' : imr.value(nsc['fcrepo'].lastModifiedBy),
-        #    }
-        #)
-
-        return ev_type
-
-
-    def delete_rsrc(self, urn, inbound=True, delete_children=True):
-        '''
-        Delete a resource and optionally its children.
+    #    If it exists, replace the existing one retaining the creation date.
+    #    '''
+    #    if self.ask_rsrc_exists(imr.identifier):
+    #        self._logger.info(
+    #                'Resource {} exists. Removing all outbound triples.'
+    #                .format(imr.identifier))
+    #        ev_type = self.replace_rsrc(imr)
+    #    else:
+    #        ev_type = self.create_rsrc(imr)
 
 
-        @param urn (rdflib.term.URIRef) URN of the resource to be deleted.
-        @param inbound (boolean) If specified, delete all inbound relationships
-        as well (this is the default).
-        @param delete_children (boolean) Whether to delete all child resources.
-        This is normally true.
-        '''
-        inbound = inbound if self.conf['referential_integrity'] == 'none' \
-                else True
-        rsrc = self.ds.resource(urn)
-        children = rsrc[nsc['ldp'].contains * '+'] if delete_children else []
+    #    return ev_type
+
+
+    #def delete_rsrc(self, urn, inbound=True, delete_children=True):
+    #    '''
+    #    Delete a resource and optionally its children.
 
 
-        self._do_delete_rsrc(rsrc, inbound)
+    #    @param urn (rdflib.term.URIRef) URN of the resource to be deleted.
+    #    @param inbound (boolean) If specified, delete all inbound relationships
+    #    as well (this is the default).
+    #    @param delete_children (boolean) Whether to delete all child resources.
+    #    This is normally true.
+    #    '''
+    #    inbound = inbound if self.config['referential_integrity'] == 'none' \
+    #            else True
+    #    rsrc = self.ds.resource(urn)
+    #    children = rsrc[nsc['ldp'].contains * '+'] if delete_children else []
 
 
-        for child_rsrc in children:
-            self._do_delete_rsrc(child_rsrc, inbound)
-            self.leave_tombstone(child_rsrc.identifier, urn)
+    #    self._do_delete_rsrc(rsrc, inbound)
 
 
-        return self.leave_tombstone(urn)
+    #    for child_rsrc in children:
+    #        self._do_delete_rsrc(child_rsrc, inbound)
+    #        self.leave_tombstone(child_rsrc.identifier, urn)
+
+    #    return self.leave_tombstone(urn)
 
 
 
 
     ## INTERFACE METHODS ##
     ## INTERFACE METHODS ##
@@ -190,79 +177,22 @@ class BaseRdfLayout(metaclass=ABCMeta):
         pass
         pass
 
 
 
 
-    @abstractmethod
-    def create_rsrc(self, imr):
-        '''Create a resource graph in the main graph.
-
-        If the resource exists, raise an exception.
-        '''
-        pass
-
-
-    @abstractmethod
-    def replace_rsrc(self, imr):
-        '''Replace a resource, i.e. delete all the triples and re-add the
-        ones provided.
-
-        @param g (rdflib.Graph) Graph to load. It must not contain
-        `fcrepo:created` and `fcrepo:createdBy`.
-        '''
-        pass
-
-
     @abstractmethod
     @abstractmethod
     def modify_dataset(self, remove_trp, add_trp):
     def modify_dataset(self, remove_trp, add_trp):
         '''
         '''
         Adds and/or removes triples from the graph.
         Adds and/or removes triples from the graph.
 
 
+        This is a crucial point for messaging. Any write operation on the RDF
+        store that needs to be notified must be performed by invoking this
+        method.
+
         NOTE: This is not specific to a resource. The LDP layer is responsible
         NOTE: This is not specific to a resource. The LDP layer is responsible
         for checking that all the +/- triples are referring to the intended
         for checking that all the +/- triples are referring to the intended
         subject(s).
         subject(s).
 
 
-        @param remove (rdflib.Graph) Triples to be removed.
-        @param add (rdflib.Graph) Triples to be added.
+        @param remove_trp (Iterable) Triples to be removed. This can be a graph.
+        @param add_trp (Iterable) Triples to be added. This can be a graph.
         '''
         '''
         pass
         pass
 
 
 
 
-    @abstractmethod
-    def leave_tombstone(self, urn, parent_urn=None):
-        '''
-        Leave a tombstone when deleting a resource.
-
-        If a parent resource is specified, a pointer to the parent's tombstone
-        is added instead.
-
-        @param urn (rdflib.term.URIRef) URN of the deleted resource.
-        @param parent_urn (rdflib.term.URIRef) URI of deleted parent.
-        '''
-        pass
-
-
-    @abstractmethod
-    def delete_tombstone(self, rsrc):
-        '''
-        Delete a tombstone.
-
-        This means removing the `fcsystem:Tombstone` RDF type and the tombstone
-        creation date, as well as all inbound `fcsystem:tombstone`
-        relationships.
-
-        NOTE: This method should NOT indiscriminately wipe all triples about
-        the subject. Some other metadata may be left for some good reason.
-
-        NOTE: This operation does not emit a message.
-        '''
-        pass
-
-
-    @abstractmethod
-    def _do_delete_rsrc(self, rsrc, inbound):
-        '''
-        Delete a single resource.
-
-        @param rsrc (rdflib.resource.Resource) Resource to be deleted.
-        @param inbound (boolean) Whether to delete the inbound relationships.
-        '''
-        pass
-

+ 0 - 0
lakesuperior/store_layouts/rdf/full_provenance_layout.py → lakesuperior/store_layouts/ldp_rs/full_provenance_layout.py


+ 0 - 1
lakesuperior/store_layouts/rdf/graph_store_connector.py → lakesuperior/store_layouts/ldp_rs/graph_store_connector.py

@@ -48,7 +48,6 @@ class GraphStoreConnector:
 
 
         @return rdflib.query.Result
         @return rdflib.query.Result
         '''
         '''
-        self._logger.debug('Sending SPARQL query: {}'.format(q))
         return self.ds.query(q, initBindings=initBindings, initNs=nsc)
         return self.ds.query(q, initBindings=initBindings, initNs=nsc)
 
 
 
 

+ 5 - 73
lakesuperior/store_layouts/rdf/simple_layout.py → lakesuperior/store_layouts/ldp_rs/simple_layout.py

@@ -14,7 +14,7 @@ from lakesuperior.dictionaries.srv_mgd_terms import  srv_mgd_subjects, \
         srv_mgd_predicates, srv_mgd_types
         srv_mgd_predicates, srv_mgd_types
 from lakesuperior.exceptions import InvalidResourceError, \
 from lakesuperior.exceptions import InvalidResourceError, \
         ResourceNotExistsError, TombstoneError
         ResourceNotExistsError, TombstoneError
-from lakesuperior.store_layouts.rdf.base_rdf_layout import BaseRdfLayout
+from lakesuperior.store_layouts.ldp_rs.base_rdf_layout import BaseRdfLayout
 from lakesuperior.toolbox import Toolbox
 from lakesuperior.toolbox import Toolbox
 
 
 
 
@@ -90,7 +90,7 @@ class SimpleLayout(BaseRdfLayout):
             raise TombstoneError(
             raise TombstoneError(
                     Toolbox().uri_to_uuid(
                     Toolbox().uri_to_uuid(
                             rsrc.value(nsc['fcsystem'].tombstone).identifier),
                             rsrc.value(nsc['fcsystem'].tombstone).identifier),
-                    tombstone_rsrc.value(nsc['fcrepo'].created))
+                    rsrc.value(nsc['fcrepo'].created))
 
 
         return rsrc
         return rsrc
 
 
@@ -105,50 +105,12 @@ class SimpleLayout(BaseRdfLayout):
             's' : urn})
             's' : urn})
 
 
 
 
-    def create_rsrc(self, imr):
-        '''
-        See base_rdf_layout.create_rsrc.
-        '''
-        self._logger.debug('Creating resource:\n{}'.format(
-            imr.graph.serialize(format='turtle').decode('utf8')))
-        #self.ds |= imr.graph # This does not seem to work with datasets.
-        for t in imr.graph:
-            self.ds.add(t)
-
-        return self.RES_CREATED
-
-
-    def replace_rsrc(self, imr):
-        '''
-        See base_rdf_layout.replace_rsrc.
-        '''
-        rsrc = self.rsrc(imr.identifier)
-
-        # Delete the stored triples but spare the protected predicates.
-        del_trp_qry = []
-        for p in rsrc.predicates():
-            if p.identifier not in self.protected_pred:
-                self._logger.debug('Removing {}'.format(p.identifier))
-                rsrc.remove(p.identifier)
-            else:
-                self._logger.debug('NOT Removing {}'.format(p))
-                imr.remove(p.identifier)
-
-        #self.ds |= imr.graph # This does not seem to work with datasets.
-        for t in imr.graph:
-            self.ds.add(t)
-
-        return self.RES_UPDATED
-
-
-    def modify_dataset(self, remove_trp, add_trp):
+    def modify_dataset(self, remove_trp=[], add_trp=[]):
         '''
         '''
         See base_rdf_layout.update_rsrc.
         See base_rdf_layout.update_rsrc.
         '''
         '''
-        self._logger.debug('Remove triples: {}'.format(
-                remove_trp.serialize(format='turtle').decode('utf-8')))
-        self._logger.debug('Add triples: {}'.format(
-                add_trp.serialize(format='turtle').decode('utf-8')))
+        self._logger.debug('Remove graph: {}'.format(set(remove_trp)))
+        self._logger.debug('Add graph: {}'.format(set(add_trp)))
 
 
         for t in remove_trp:
         for t in remove_trp:
             self.ds.remove(t)
             self.ds.remove(t)
@@ -156,36 +118,6 @@ class SimpleLayout(BaseRdfLayout):
             self.ds.add(t)
             self.ds.add(t)
 
 
 
 
-    ## PROTECTED METHODS ##
-
-    def _do_delete_rsrc(self, rsrc, inbound):
-        '''
-        See BaseRdfLayout._do_delete_rsrc
-        '''
-        urn = rsrc.identifier
-        print('Removing resource {}.'.format(urn))
-
-        rsrc.remove(Variable('p'))
-
-        if inbound:
-            self.ds.remove((Variable('s'), Variable('p'), rsrc.identifier))
-
-        return urn
-
-
-    def leave_tombstone(self, urn, parent_urn=None):
-        '''
-        See BaseRdfLayout.leave_tombstone
-        '''
-        if parent_urn:
-            self.ds.add((urn, nsc['fcsystem'].tombstone, parent_urn))
-        else:
-            # @TODO Use gunicorn to get request timestamp.
-            ts = Literal(arrow.utcnow(), datatype=XSD.dateTime)
-            self.ds.add((urn, RDF.type, nsc['fcsystem'].Tombstone))
-            self.ds.add((urn, nsc['fcrepo'].created, ts))
-
-
     def delete_tombstone(self, urn):
     def delete_tombstone(self, urn):
         '''
         '''
         See BaseRdfLayout.leave_tombstone
         See BaseRdfLayout.leave_tombstone

+ 1 - 10
lakesuperior/toolbox.py

@@ -27,15 +27,6 @@ class Toolbox:
         self.base_url = request.host_url + g.url_prefix
         self.base_url = request.host_url + g.url_prefix
 
 
 
 
-    def camelcase(self, word):
-        '''
-        Convert a string with underscores with a camel-cased one.
-
-        Ripped from https://stackoverflow.com/a/6425628
-        '''
-        return ''.join(x.capitalize() or '_' for x in word.split('_'))
-
-
     def uuid_to_uri(self, uuid):
     def uuid_to_uri(self, uuid):
         '''Convert a UUID to a URI.
         '''Convert a UUID to a URI.
 
 
@@ -52,7 +43,7 @@ class Toolbox:
         @return string
         @return string
         '''
         '''
         if uri == self.ROOT_NODE_URN:
         if uri == self.ROOT_NODE_URN:
-            return ''
+            return None
         elif uri.startswith(nsc['fcres']):
         elif uri.startswith(nsc['fcres']):
             return str(uri).replace(nsc['fcres'], '')
             return str(uri).replace(nsc['fcres'], '')
         else:
         else:

+ 5 - 3
tests/endpoints/test_ldp.py

@@ -76,9 +76,11 @@ class TestLdp:
         PUT a resource with binary payload and verify checksums.
         PUT a resource with binary payload and verify checksums.
         '''
         '''
         rnd_img['content'].seek(0)
         rnd_img['content'].seek(0)
-        self.client.put('/ldp/ldpnr01', data=rnd_img['content'], headers={
-                'Content-Disposition' : 'attachment; filename={}'.format(
-                rnd_img['filename'])})
+        resp = self.client.put('/ldp/ldpnr01', data=rnd_img['content'],
+                headers={
+                    'Content-Disposition' : 'attachment; filename={}'.format(
+                    rnd_img['filename'])})
+        assert resp.status_code == 201
 
 
         resp = self.client.get('/ldp/ldpnr01', headers={'accept' : 'image/png'})
         resp = self.client.get('/ldp/ldpnr01', headers={'accept' : 'image/png'})
         assert resp.status_code == 200
         assert resp.status_code == 200

+ 8 - 8
tests/test_toolbox.py

@@ -17,13 +17,13 @@ class TestToolbox:
     '''
     '''
     Unit tests for toolbox methods.
     Unit tests for toolbox methods.
     '''
     '''
-    def test_camelcase(self, tb):
-        '''
-        Test conversion from underscore notation to camelcase.
-        '''
-        assert tb.camelcase('test_input_string') == 'TestInputString'
-        assert tb.camelcase('_test_input_string') == '_TestInputString'
-        assert tb.camelcase('test__input__string') == 'Test_Input_String'
+    #def test_camelcase(self, tb):
+    #    '''
+    #    Test conversion from underscore notation to camelcase.
+    #    '''
+    #    assert tb.camelcase('test_input_string') == 'TestInputString'
+    #    assert tb.camelcase('_test_input_string') == '_TestInputString'
+    #    assert tb.camelcase('test__input__string') == 'Test_Input_String'
 
 
 
 
     def test_uuid_to_uri(self, tb):
     def test_uuid_to_uri(self, tb):
@@ -36,7 +36,7 @@ class TestToolbox:
         assert tb.uri_to_uuid(URIRef(tb.base_url) + '/test01/test02') == \
         assert tb.uri_to_uuid(URIRef(tb.base_url) + '/test01/test02') == \
                 'test01/test02'
                 'test01/test02'
         assert tb.uri_to_uuid(URIRef(tb.base_url)) == ''
         assert tb.uri_to_uuid(URIRef(tb.base_url)) == ''
-        assert tb.uri_to_uuid(nsc['fcsystem'].root) == ''
+        assert tb.uri_to_uuid(nsc['fcsystem'].root) == None
         assert tb.uri_to_uuid(nsc['fcres']['1234']) == '1234'
         assert tb.uri_to_uuid(nsc['fcres']['1234']) == '1234'
         assert tb.uri_to_uuid(nsc['fcres']['1234/5678']) == '1234/5678'
         assert tb.uri_to_uuid(nsc['fcres']['1234/5678']) == '1234/5678'
 
 

+ 8 - 6
util/bootstrap.py

@@ -7,15 +7,17 @@ sys.path.append('.')
 
 
 from lakesuperior.app import create_app
 from lakesuperior.app import create_app
 from lakesuperior.config_parser import config
 from lakesuperior.config_parser import config
-from lakesuperior.store_layouts.rdf.graph_store_connector import \
+from lakesuperior.store_layouts.ldp_rs.graph_store_connector import \
         GraphStoreConnector
         GraphStoreConnector
 from lakesuperior.model.ldpr import Ldpr
 from lakesuperior.model.ldpr import Ldpr
 
 
-# This script will parse configuration files and initialize a filesystem and
-# triplestore with an empty FCREPO repository.
-# It is used in test suites and on a first run.
-#
-# Additional, scaffolding files may be parsed to create initial contents.
+__doc__ = '''
+This script will parse configuration files and initialize a filesystem and
+triplestore with an empty FCREPO repository.
+It is used in test suites and on a first run.
+
+Additional scaffolding files may be parsed to create initial contents.
+'''
 
 
 
 
 def bootstrap_db(app):
 def bootstrap_db(app):