
Clean up docstrings.

Stefano Cossu, 7 years ago
Commit: 5603b73102

lakesuperior/api/query.py (+3, -3)

@@ -18,10 +18,10 @@ def sparql_query(qry_str, fmt):
     Send a SPARQL query to the triplestore.
     Send a SPARQL query to the triplestore.
 
 
     :param str qry_str: SPARQL query string. SPARQL 1.1 Query Language
     :param str qry_str: SPARQL query string. SPARQL 1.1 Query Language
-    (https://www.w3.org/TR/sparql11-query/) is supported.
+        (https://www.w3.org/TR/sparql11-query/) is supported.
     :param str fmt: Serialization format. This varies depending on the
     :param str fmt: Serialization format. This varies depending on the
-    query type (SELECT, ASK, CONSTRUCT, etc.). [@TODO Add reference to RDFLib
-    serialization formats]
+        query type (SELECT, ASK, CONSTRUCT, etc.). [TODO Add reference to
+        RDFLib serialization formats]
 
 
     :rtype: BytesIO
     :rtype: BytesIO
     :return: Serialized SPARQL results.
     :return: Serialized SPARQL results.
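
A usage sketch for the API documented above (illustrative only: the module path follows the file name, and 'json' as the ``fmt`` value is an assumption, since the accepted formats depend on RDFLib's result serializers):

    from lakesuperior.api import query as query_api

    qry = 'SELECT ?s WHERE { ?s ?p ?o } LIMIT 10'
    # Per the docstring, a BytesIO with the serialized results is returned.
    out = query_api.sparql_query(qry, 'json')
    print(out.read().decode('utf-8'))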

lakesuperior/api/resource.py (+23, -21)

@@ -156,8 +156,8 @@ def get(uid, repr_options={}):
 
 
     :param string uid: Resource UID.
     :param string uid: Resource UID.
     :param  repr_options: (dict(bool)) Representation options. This is a dict
     :param  repr_options: (dict(bool)) Representation options. This is a dict
-    that is unpacked downstream in the process. The default empty dict results
-    in default values. The accepted dict keys are:
+        that is unpacked downstream in the process. The default empty dict
+        results in default values. The accepted dict keys are:
 
 
     - incl_inbound: include inbound references. Default: False.
     - incl_inbound: include inbound references. Default: False.
     - incl_children: include children URIs. Default: True.
     - incl_children: include children URIs. Default: True.
@@ -188,7 +188,7 @@ def get_version(uid, ver_uid):
 
 
 @transaction(True)
 @transaction(True)
 def create(parent, slug, **kwargs):
 def create(parent, slug, **kwargs):
-    """
+    r"""
     Mint a new UID and create a resource.
     Mint a new UID and create a resource.
 
 
     The UID is computed from a given parent UID and a "slug", a proposed path
     The UID is computed from a given parent UID and a "slug", a proposed path
@@ -196,13 +196,13 @@ def create(parent, slug, **kwargs):
     path but it may use a different one if a conflict with an existing resource
     path but it may use a different one if a conflict with an existing resource
     arises.
     arises.
 
 
-    :param string parent: UID of the parent resource.
-    :param string slug: Tentative path relative to the parent UID.
-    :param **kwargs: Other parameters are passed to the
-    LdpFactory.from_provided method. Please see the documentation for that
-    method for explanation of individual parameters.
+    :param str parent: UID of the parent resource.
+    :param str slug: Tentative path relative to the parent UID.
+    :param \*\*kwargs: Other parameters are passed to the
+      :meth:`LdpFactory.from_provided` method.
 
 
-    @return string UID of the new resource.
+    :rtype: str
+    :return: UID of the new resource.
     """
     """
     uid = LdpFactory.mint_uid(parent, slug)
     uid = LdpFactory.mint_uid(parent, slug)
     logger.debug('Minted UID for new resource: {}'.format(uid))
     logger.debug('Minted UID for new resource: {}'.format(uid))
@@ -215,7 +215,7 @@ def create(parent, slug, **kwargs):
 
 
 @transaction(True)
 @transaction(True)
 def create_or_replace(uid, stream=None, **kwargs):
 def create_or_replace(uid, stream=None, **kwargs):
-    """
+    r"""
     Create or replace a resource with a specified UID.
     Create or replace a resource with a specified UID.
 
 
     If the resource already exists, all user-provided properties of the
     If the resource already exists, all user-provided properties of the
@@ -225,12 +225,12 @@ def create_or_replace(uid, stream=None, **kwargs):
 
 
     :param string uid: UID of the resource to be created or updated.
     :param string uid: UID of the resource to be created or updated.
     :param BytesIO stream: Content stream. If empty, an empty container is
     :param BytesIO stream: Content stream. If empty, an empty container is
-    created.
-    :param **kwargs: Other parameters are passed to the
-    LdpFactory.from_provided method. Please see the documentation for that
-    method for explanation of individual parameters.
+        created.
+    :param \*\*kwargs: Other parameters are passed to the
+        :meth:`LdpFactory.from_provided` method.
 
 
-    @return string Event type: whether the resource was created or updated.
+    :rtype: str
+    :return: Event type: whether the resource was created or updated.
     """
     """
     rsrc = LdpFactory.from_provided(uid, stream=stream, **kwargs)
     rsrc = LdpFactory.from_provided(uid, stream=stream, **kwargs)
 
 
@@ -249,7 +249,8 @@ def update(uid, update_str, is_metadata=False):
     :param string uid: Resource UID.
     :param string uid: Resource UID.
     :param string update_str: SPARQL-Update statements.
     :param string update_str: SPARQL-Update statements.
     :param bool is_metadata: Whether the resource metadata is being updated.
     :param bool is_metadata: Whether the resource metadata is being updated.
-    If False, and the resource being updated is a LDP-NR, an error is raised.
+        If False, and the resource being updated is a LDP-NR, an error is
+        raised.
     """
     """
     rsrc = LdpFactory.from_stored(uid)
     rsrc = LdpFactory.from_stored(uid)
     if LDP_NR_TYPE in rsrc.ldp_types and not is_metadata:
     if LDP_NR_TYPE in rsrc.ldp_types and not is_metadata:
@@ -267,10 +268,11 @@ def create_version(uid, ver_uid):
 
 
     :param string uid: Resource UID.
     :param string uid: Resource UID.
     :param string ver_uid: Version UID to be appended to the resource URI.
     :param string ver_uid: Version UID to be appended to the resource URI.
-    NOTE: this is a "slug", i.e. the version URI is not guaranteed to be the
-    one indicated.
+      NOTE: this is a "slug", i.e. the version URI is not guaranteed to be the
+      one indicated.
 
 
-    @return string Version UID.
+    :rtype: str
+    :return: Version UID.
     """
     """
     return LdpFactory.from_stored(uid).create_version(ver_uid)
     return LdpFactory.from_stored(uid).create_version(ver_uid)
 
 
@@ -282,7 +284,7 @@ def delete(uid, soft=True):
 
 
     :param string uid: Resource UID.
     :param string uid: Resource UID.
     :param bool soft: Whether to perform a soft-delete and leave a
     :param bool soft: Whether to perform a soft-delete and leave a
-    tombstone resource, or wipe any memory of the resource.
+      tombstone resource, or wipe any memory of the resource.
     """
     """
     # If referential integrity is enforced, grab all inbound relationships
     # If referential integrity is enforced, grab all inbound relationships
     # to break them.
     # to break them.
@@ -318,6 +320,6 @@ def resurrect(uid):
     """
     """
     Reinstate a buried (soft-deleted) resource.
     Reinstate a buried (soft-deleted) resource.
 
 
-    :param string uid: Resource UID.
+    :param str uid: Resource UID.
     """
     """
     return LdpFactory.from_stored(uid).resurrect_rsrc()
     return LdpFactory.from_stored(uid).resurrect_rsrc()
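
Taken together, the signatures touched in this file suggest a workflow like the following sketch (hypothetical values: the root parent UID '/' and the 'v1' slug are assumptions, and the exact event-type strings returned by ``create_or_replace`` are not spelled out in this diff):

    from lakesuperior.api import resource as rsrc_api

    uid = rsrc_api.create('/', 'my_resource')     # returns the minted UID (str)
    evt = rsrc_api.create_or_replace(uid)         # no stream: an empty container is created
    ver_uid = rsrc_api.create_version(uid, 'v1')  # 'v1' is a slug; actual version UID may differ
    rsrc_api.delete(uid, soft=True)               # soft-delete leaves a tombstone
    rsrc_api.resurrect(uid)                       # reinstate the buried resource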

lakesuperior/config_parser.py (+2, -2)

@@ -19,8 +19,8 @@ def parse_config(config_dir=None):
     ``etc.defaults``.
     ``etc.defaults``.
 
 
     :param config_dir: Location on the filesystem of the configuration
     :param config_dir: Location on the filesystem of the configuration
-    directory. The default is set by the ``FCREPO_CONFIG_DIR`` environment
-    variable or, if this is not set, the ``etc.defaults`` stock directory.
+        directory. The default is set by the ``FCREPO_CONFIG_DIR`` environment
+        variable or, if this is not set, the ``etc.defaults`` stock directory.
     """
     """
     configs = (
     configs = (
         'application',
         'application',
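
For reference, a minimal sketch of how the documented fallback behaves (the explicit directory path below is hypothetical):

    from lakesuperior.config_parser import parse_config

    # Uses FCREPO_CONFIG_DIR if set, otherwise the stock etc.defaults directory.
    config = parse_config()
    # Or point to an explicit configuration directory:
    config = parse_config(config_dir='/opt/lakesuperior/etc')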

lakesuperior/migrator.py (+20, -18)

@@ -75,22 +75,23 @@ class Migrator:
         Set up base paths and clean up existing directories.
         Set up base paths and clean up existing directories.
 
 
         :param rdflib.URIRef src: Webroot of source repository. This must
         :param rdflib.URIRef src: Webroot of source repository. This must
-        correspond to the LDP root node (for Fedora it can be e.g.
-        ``http://localhost:8080fcrepo/rest/``) and is used to determine if URIs
-        retrieved are managed by this repository.
+            correspond to the LDP root node (for Fedora it can be e.g.
+            ``http://localhost:8080fcrepo/rest/``) and is used to determine if
+            URIs retrieved are managed by this repository.
         :param str dest: Destination repository path. If the location exists
         :param str dest: Destination repository path. If the location exists
-        it must be a writable directory. It will be deleted and recreated. If
-        it does not exist, it will be created along with its parents if
-        missing.
+            it must be a writable directory. It will be deleted and recreated.
+            If it does not exist, it will be created along with its parents if
+            missing.
         :param str binary_handling: One of ``include``, ``truncate`` or
         :param str binary_handling: One of ``include``, ``truncate`` or
-        ``split``.
+            ``split``.
         :param bool compact_uris: NOT IMPLEMENTED. Whether the process should
         :param bool compact_uris: NOT IMPLEMENTED. Whether the process should
-        attempt to compact URIs generated with broken up path segments. If the
-        UID matches a pattern such as `/12/34/56/123456...` it is converted to
-        `/123456...`. This would remove a lot of cruft caused by the pairtree
-        segments. Note that this will change the publicly exposed URIs. If
-        durability is a concern, a rewrite directive can be added to the HTTP
-        server that proxies the WSGI endpoint.
+            attempt to compact URIs generated with broken up path segments. If
+            the UID matches a pattern such as ``/12/34/56/123456...`` it is
+            converted to ``/123456...``. This would remove a lot of cruft
+            caused by the pairtree segments. Note that this will change the
+            publicly exposed URIs. If durability is a concern, a rewrite
+            directive can be added to the HTTP server that proxies the WSGI
+            endpoint.
         """
         """
         # Set up repo folder structure and copy default configuration to
         # Set up repo folder structure and copy default configuration to
         # destination file.
         # destination file.
@@ -137,11 +138,12 @@ class Migrator:
         data set contained in a folder from an LDP repository.
         data set contained in a folder from an LDP repository.
 
 
         :param start_pts: List of starting points to retrieve
         :param start_pts: List of starting points to retrieve
-        :type start_pts: tuple or list 
-        resources from. It would typically be the repository root in case of a
-        full dump or one or more resources in the repository for a partial one.
+        :type start_pts: tuple or list
+            resources from. It would typically be the repository root in case
+            of a full dump or one or more resources in the repository for a
+            partial one.
         :param str listf_ile: path to a local file containing a list of URIs,
         :param str listf_ile: path to a local file containing a list of URIs,
-        one per line.
+            one per line.
         """
         """
         from lakesuperior.api import resource as rsrc_api
         from lakesuperior.api import resource as rsrc_api
         self._ct = 0
         self._ct = 0
@@ -179,7 +181,7 @@ class Migrator:
         managed by the repository is encountered.
         managed by the repository is encountered.
 
 
         :param str uid: The path relative to the source server webroot
         :param str uid: The path relative to the source server webroot
-        pointing to the resource to crawl, effectively the resource UID.
+            pointing to the resource to crawl, effectively the resource UID.
         """
         """
         ibase = str(nsc['fcres'])
         ibase = str(nsc['fcres'])
         # Public URI of source repo.
         # Public URI of source repo.
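
Assuming the parameters documented in the first hunk above belong to the constructor, a hypothetical instantiation could look like this (URL and paths are illustrative):

    from rdflib import URIRef
    from lakesuperior.migrator import Migrator

    migrator = Migrator(
        src=URIRef('http://localhost:8080/fcrepo/rest/'),  # LDP root of the source repo
        dest='/tmp/new_lsup_repo',       # deleted and recreated if it already exists
        binary_handling='include',       # one of 'include', 'truncate', 'split'
    )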

lakesuperior/model/ldp_factory.py (+10, -8)

@@ -80,15 +80,16 @@ class LdpFactory:
     @staticmethod
     @staticmethod
     def from_provided(
     def from_provided(
             uid, mimetype=None, stream=None, provided_imr=None, **kwargs):
             uid, mimetype=None, stream=None, provided_imr=None, **kwargs):
-        """
+        r"""
         Determine LDP type from request content.
         Determine LDP type from request content.
 
 
         :param str uid: UID of the resource to be created or updated.
         :param str uid: UID of the resource to be created or updated.
         :param str mimetype: The provided content MIME type.
         :param str mimetype: The provided content MIME type.
-        :param IOStream | None stream: The provided data stream. This can be
-        RDF or non-RDF content, or None. In the latter case, an empty container
-        is created.
-        @param **kwargs Arguments passed to the LDP class constructor.
+        :param stream: The provided data stream. This can be
+            RDF or non-RDF content, or None. In the latter case, an empty
+            container is created.
+        :type stream: IOStream or None
+        :param \*\*kwargs: Arguments passed to the LDP class constructor.
         """
         """
         uri = nsc['fcres'][uid]
         uri = nsc['fcres'][uid]
 
 
@@ -189,11 +190,12 @@ class LdpFactory:
         found or a 409 if the parent is not a valid container.
         found or a 409 if the parent is not a valid container.
 
 
         :param str parent_uid: UID of the parent resource. It must be an
         :param str parent_uid: UID of the parent resource. It must be an
-        existing LDPC.
+            existing LDPC.
         :param str path: path to the resource, relative to the parent.
         :param str path: path to the resource, relative to the parent.
 
 
-        @return string The confirmed resource UID. This may be different from
-        what has been indicated.
+        :rtype: str
+        :return: The confirmed resource UID. This may be different from
+            what has been indicated.
         """
         """
         def split_if_legacy(uid):
         def split_if_legacy(uid):
             if config['application']['store']['ldp_rs']['legacy_ptree_split']:
             if config['application']['store']['ldp_rs']['legacy_ptree_split']:
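
The two factory methods documented above combine roughly as in this sketch (the root parent UID '/' and the Turtle payload are illustrative):

    from io import BytesIO
    from lakesuperior.model.ldp_factory import LdpFactory

    uid = LdpFactory.mint_uid('/', 'photo1')   # may differ from '/photo1' on conflict
    rsrc = LdpFactory.from_provided(
        uid, mimetype='text/turtle',
        stream=BytesIO(b'<> a <http://www.w3.org/ns/ldp#RDFSource> .'))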

lakesuperior/model/ldp_nr.py (+2, -2)

@@ -72,7 +72,7 @@ class LdpNr(Ldpr):
         Create a new binary resource with a corresponding RDF representation.
         Create a new binary resource with a corresponding RDF representation.
 
 
         :param bool create_only: Whether the resource is being created or
         :param bool create_only: Whether the resource is being created or
-        updated.
+            updated.
         """
         """
         # Persist the stream.
         # Persist the stream.
         self.digest, self.size = nonrdfly.persist(self.stream)
         self.digest, self.size = nonrdfly.persist(self.stream)
@@ -98,7 +98,7 @@ class LdpNr(Ldpr):
         :param BufferedIO stream: The uploaded data stream.
         :param BufferedIO stream: The uploaded data stream.
         :param str mimetype: MIME type of the uploaded file.
         :param str mimetype: MIME type of the uploaded file.
         :param defaultdict disposition: The ``Content-Disposition`` header
         :param defaultdict disposition: The ``Content-Disposition`` header
-        content, parsed through ``parse_rfc7240``.
+            content, parsed through ``parse_rfc7240``.
         """
         """
         super()._add_srv_mgd_triples(create)
         super()._add_srv_mgd_triples(create)
 
 

lakesuperior/model/ldpr.py (+29, -30)

@@ -36,21 +36,16 @@ class Ldpr(metaclass=ABCMeta):
     the vanilla LDP specifications. This is extended by the
     the vanilla LDP specifications. This is extended by the
     `lakesuperior.fcrepo.Resource` class.
     `lakesuperior.fcrepo.Resource` class.
 
 
-    Inheritance graph: https://www.w3.org/TR/ldp/#fig-ldpc-types
+    See inheritance graph: https://www.w3.org/TR/ldp/#fig-ldpc-types
 
 
-    Note: Even though LdpNr (which is a subclass of Ldpr) handles binary files,
-    it still has an RDF representation in the triplestore. Hence, some of the
-    RDF-related methods are defined in this class rather than in the LdpRs
-    class.
+    **Note**: Even though LdpNr (which is a subclass of Ldpr) handles binary
+    files, it still has an RDF representation in the triplestore. Hence, some
+    of the RDF-related methods are defined in this class rather than in
+    :class:`~lakesuperior.model.ldp_rs.LdpRs`.
 
 
-    Convention notes:
-
-    All the methods in this class handle internal UUIDs (URN). Public-facing
-    URIs are converted from URNs and passed by these methods to the methods
-    handling HTTP negotiation.
-
-    The data passed to the store layout for processing should be in a graph.
-    All conversion from request payload strings is done here.
+    **Note:** Only internal facing (``info:fcres``-prefixed) URIs are handled
+    in this class. Public-facing URI conversion is handled in the
+    :mod:`~lakesuperior.endpoints.ldp` module.
     """
     """
 
 
     EMBED_CHILD_RES_URI = nsc['fcrepo'].EmbedResources
     EMBED_CHILD_RES_URI = nsc['fcrepo'].EmbedResources
@@ -67,33 +62,35 @@ class Ldpr(metaclass=ABCMeta):
     WRKF_INBOUND = '_workflow:inbound_'
     WRKF_INBOUND = '_workflow:inbound_'
     WRKF_OUTBOUND = '_workflow:outbound_'
     WRKF_OUTBOUND = '_workflow:outbound_'
 
 
-    # Default user to be used for the `createdBy` and `lastUpdatedBy` if a user
-    # is not provided.
     DEFAULT_USER = Literal('BypassAdmin')
     DEFAULT_USER = Literal('BypassAdmin')
+    """
+    Default user to be used for the `createdBy` and `lastUpdatedBy` if a user
+    is not provided.
+    """
 
 
-    # RDF Types that populate a new resource.
     base_types = {
     base_types = {
         nsc['fcrepo'].Resource,
         nsc['fcrepo'].Resource,
         nsc['ldp'].Resource,
         nsc['ldp'].Resource,
         nsc['ldp'].RDFSource,
         nsc['ldp'].RDFSource,
     }
     }
+    """RDF Types that populate a new resource."""
 
 
-    # Predicates that do not get removed when a resource is replaced.
     protected_pred = (
     protected_pred = (
         nsc['fcrepo'].created,
         nsc['fcrepo'].created,
         nsc['fcrepo'].createdBy,
         nsc['fcrepo'].createdBy,
         nsc['ldp'].contains,
         nsc['ldp'].contains,
     )
     )
+    """Predicates that do not get removed when a resource is replaced."""
 
 
-    # Server-managed RDF types ignored in the RDF payload if the resource is
-    # being created. N.B. These still raise an error if the resource exists.
     smt_allow_on_create = {
     smt_allow_on_create = {
         nsc['ldp'].DirectContainer,
         nsc['ldp'].DirectContainer,
         nsc['ldp'].IndirectContainer,
         nsc['ldp'].IndirectContainer,
     }
     }
+    """
+    Server-managed RDF types ignored in the RDF payload if the resource is
+    being created. N.B. These still raise an error if the resource exists.
+    """
 
 
-
-    # Predicates to remove when a resource is replaced.
     delete_preds_on_replace = {
     delete_preds_on_replace = {
         nsc['ebucore'].hasMimeType,
         nsc['ebucore'].hasMimeType,
         nsc['fcrepo'].lastModified,
         nsc['fcrepo'].lastModified,
@@ -101,6 +98,7 @@ class Ldpr(metaclass=ABCMeta):
         nsc['premis'].hasSize,
         nsc['premis'].hasSize,
         nsc['premis'].hasMessageDigest,
         nsc['premis'].hasMessageDigest,
     }
     }
+    """Predicates to remove when a resource is replaced."""
 
 
 
 
     ## MAGIC METHODS ##
     ## MAGIC METHODS ##
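
The pattern applied to the class attributes above (and in lmdb_store.py below) is the Sphinx attribute-docstring convention: a string literal placed immediately after the assignment documents the attribute and is picked up by autodoc, whereas a leading ``#`` comment is not. A minimal illustration (class and values are hypothetical):

    class Example:
        DEFAULT_USER = 'BypassAdmin'
        """Default user applied when none is provided."""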
@@ -385,9 +383,10 @@ class Ldpr(metaclass=ABCMeta):
         Delete a single resource and create a tombstone.
         Delete a single resource and create a tombstone.
 
 
         :param boolean inbound: Whether to delete the inbound relationships.
         :param boolean inbound: Whether to delete the inbound relationships.
-        :param URIRef tstone_pointer: If set to a URN, this creates a pointer
-        to the tombstone of the resource that used to contain the deleted
-        resource. Otherwise the deleted resource becomes a tombstone.
+        :param rdflib.URIRef tstone_pointer: If set to a URN, this creates a
+            pointer to the tombstone of the resource that used to contain the
+            deleted resource. Otherwise the deleted resource becomes a
+            tombstone.
         """
         """
         logger.info('Burying resource {}'.format(self.uid))
         logger.info('Burying resource {}'.format(self.uid))
         # Create a backup snapshot for resurrection purposes.
         # Create a backup snapshot for resurrection purposes.
@@ -520,12 +519,12 @@ class Ldpr(metaclass=ABCMeta):
         """
         """
         Create a new version of the resource.
         Create a new version of the resource.
 
 
-        NOTE: This creates an event only for the resource being updated (due
-        to the added `hasVersion` triple and possibly to the `hasVersions` one)
-        but not for the version being created.
+        **Note:** This creates an event only for the resource being updated
+        (due to the added `hasVersion` triple and possibly to the
+        ``hasVersions`` one) but not for the version being created.
 
 
-        :param  ver_uid: Version ver_uid. If already existing, an exception is
-        raised.
+        :param str ver_uid: Version UID. If already existing, a new version UID
+            is minted.
         """
         """
         if not ver_uid or ver_uid in self.version_uids:
         if not ver_uid or ver_uid in self.version_uids:
             ver_uid = str(uuid4())
             ver_uid = str(uuid4())
@@ -539,7 +538,7 @@ class Ldpr(metaclass=ABCMeta):
 
 
         :param str ver_uid: Version UID.
         :param str ver_uid: Version UID.
         :param boolean backup: Whether to create a backup snapshot. Default is
         :param boolean backup: Whether to create a backup snapshot. Default is
-        true.
+            True.
         """
         """
         # Create a backup snapshot.
         # Create a backup snapshot.
         if backup:
         if backup:

lakesuperior/store/ldp_rs/lmdb_store.py (+30, -25)

@@ -181,18 +181,17 @@ class LmdbStore(Store):
     graph_aware = True
     graph_aware = True
     transaction_aware = True
     transaction_aware = True
 
 
+    MAP_SIZE = 1024 ** 4 # 1Tb
     """
     """
     LMDB map size. See http://lmdb.readthedocs.io/en/release/#environment-class
     LMDB map size. See http://lmdb.readthedocs.io/en/release/#environment-class
     """
     """
-    MAP_SIZE = 1024 ** 4 # 1Tb
 
 
+    TERM_HASH_ALGO = 'sha1'
     """
     """
-    Key hashing algorithm. If you are paranoid, use SHA1. Otherwise, MD5 is
-    faster and takes up less space (16 bytes vs. 20 bytes). This may make a
-    visible difference because keys are generated and parsed very often.
+    Term hashing algorithm. SHA1 is the default.
     """
     """
-    KEY_HASH_ALGO = 'sha1'
 
 
+    KEY_LENGTH = 5
     """
     """
     Fixed length for term keys.
     Fixed length for term keys.
 
 
@@ -210,13 +209,12 @@ class LmdbStore(Store):
     exchange between the store and the application. However it is sensible not
     exchange between the store and the application. However it is sensible not
     to expose this value as a configuration option.
     to expose this value as a configuration option.
     """
     """
-    KEY_LENGTH = 5
 
 
+    KEY_START = 1
     """
     """
-    Lexical sequence start. `\x01` is fine since no special characters are used,
-    but it's good to leave a spare for potential future use.
+    Lexical sequence start. ``\\x01`` is fine since no special characters are
+    used, but it's good to leave a spare for potential future use.
     """
     """
-    KEY_START = 1
 
 
     data_keys = (
     data_keys = (
         # Term key to serialized term content: 1:1
         # Term key to serialized term content: 1:1
@@ -237,6 +235,7 @@ class LmdbStore(Store):
         's:po', 'p:so', 'o:sp', 'c:spo',
         's:po', 'p:so', 'o:sp', 'c:spo',
     )
     )
 
 
+    _lookup_rank = ('s', 'o', 'p')
     """
     """
     Order in which keys are looked up if two terms are bound.
     Order in which keys are looked up if two terms are bound.
     The indices with the smallest average number of values per key should be
     The indices with the smallest average number of values per key should be
@@ -245,16 +244,15 @@ class LmdbStore(Store):
     If we want to get fancy, this can be rebalanced from time to time by
     If we want to get fancy, this can be rebalanced from time to time by
     looking up the number of keys in (s:po, p:so, o:sp).
     looking up the number of keys in (s:po, p:so, o:sp).
     """
     """
-    _lookup_rank = ('s', 'o', 'p')
 
 
-    """
-    Order of terms in the lookup indices. Used to rebuild a triple from lookup.
-    """
     _lookup_ordering = {
     _lookup_ordering = {
         's:po': (0, 1, 2),
         's:po': (0, 1, 2),
         'p:so': (1, 0, 2),
         'p:so': (1, 0, 2),
         'o:sp': (2, 0, 1),
         'o:sp': (2, 0, 1),
     }
     }
+    """
+    Order of terms in the lookup indices. Used to rebuild a triple from lookup.
+    """
 
 
     data_env = None
     data_env = None
     idx_env = None
     idx_env = None
@@ -412,8 +410,8 @@ class LmdbStore(Store):
 
 
         :param lmdb.Transaction txn: This can be a read or write transaction.
         :param lmdb.Transaction txn: This can be a read or write transaction.
 
 
-        @return dict(string, lmdb.Cursor) Keys are index labels, values are
-        index cursors.
+        :rtype: dict(string, lmdb.Cursor)
+        :return: dict of index labels, index cursors.
         """
         """
         return {
         return {
             key: txn.cursor(self.dbs[key])
             key: txn.cursor(self.dbs[key])
@@ -453,7 +451,7 @@ class LmdbStore(Store):
         """
         """
         Add a triple and start indexing.
         Add a triple and start indexing.
 
 
-        :param tuple:rdflib.Identifier triple: Tuple of three identifiers.
+        :param tuple(rdflib.Identifier) triple: Tuple of three identifiers.
         :param context: Context identifier. ``None`` inserts in the default
         :param context: Context identifier. ``None`` inserts in the default
         graph.
         graph.
         :type context: rdflib.Identifier or None
         :type context: rdflib.Identifier or None
@@ -649,10 +647,12 @@ class LmdbStore(Store):
         """
         """
         Get the prefix associated with a namespace.
         Get the prefix associated with a namespace.
 
 
-        @NOTE A namespace can be only bound to one prefix in this
+        **Note:** A namespace can be only bound to one prefix in this
         implementation.
         implementation.
 
 
-        :param rdflib.URIRef namespace: Fully qualified URI of namespace.
+        :param rdflib.Namespace namespace: Fully qualified namespace.
+
+        :rtype: str or None
         """
         """
         with self.cur('ns:pfx') as cur:
         with self.cur('ns:pfx') as cur:
             prefix = cur.get(s2b(namespace))
             prefix = cur.get(s2b(namespace))
@@ -660,7 +660,10 @@ class LmdbStore(Store):
 
 
 
 
     def namespaces(self):
     def namespaces(self):
-        """Get an iterator of all prefix: namespace bindings."""
+        """Get an iterator of all prefix: namespace bindings.
+
+        :rtype: Iterator(tuple(str, rdflib.Namespace))
+        """
         with self.cur('pfx:ns') as cur:
         with self.cur('pfx:ns') as cur:
             for pfx, ns in iter(cur):
             for pfx, ns in iter(cur):
                 yield (b2s(pfx), Namespace(b2s(ns)))
                 yield (b2s(pfx), Namespace(b2s(ns)))
@@ -670,7 +673,7 @@ class LmdbStore(Store):
         """
         """
         Get a list of all contexts.
         Get a list of all contexts.
 
 
-        @return generator(Graph)
+        :rtype: Iterator(rdflib.Graph)
         """
         """
         if triple and any(triple):
         if triple and any(triple):
             with self.cur('spo:c') as cur:
             with self.cur('spo:c') as cur:
@@ -698,7 +701,7 @@ class LmdbStore(Store):
         Therefore it needs to open a write transaction. This is not ideal
         Therefore it needs to open a write transaction. This is not ideal
         but the only way to handle datasets in RDFLib.
         but the only way to handle datasets in RDFLib.
 
 
-        :param URIRef graph: URI of the named graph to add.
+        :param rdflib.URIRef graph: URI of the named graph to add.
         """
         """
         if isinstance(graph, Graph):
         if isinstance(graph, Graph):
             graph = graph.identifier
             graph = graph.identifier
@@ -732,7 +735,7 @@ class LmdbStore(Store):
         """
         """
         Remove all triples from graph and the graph itself.
         Remove all triples from graph and the graph itself.
 
 
-        :param URIRef graph: URI of the named graph to remove.
+        :param rdflib.URIRef graph: URI of the named graph to remove.
         """
         """
         if isinstance(graph, Graph):
         if isinstance(graph, Graph):
             graph = graph.identifier
             graph = graph.identifier
@@ -887,7 +890,9 @@ class LmdbStore(Store):
         :type key: bytes or memoryview
         :type key: bytes or memoryview
         compound one in which case the function will return multiple terms.
         compound one in which case the function will return multiple terms.
 
 
-        @return tuple
+        :rtype: tuple(rdflib.term.Identifier)
+        :return: The term(s) associated with the key(s). The result is always
+        a tuple even for single results.
         """
         """
         with self.cur('t:st') as cur:
         with self.cur('t:st') as cur:
             return tuple(
             return tuple(
@@ -900,7 +905,7 @@ class LmdbStore(Store):
         Convert a triple, quad or term into a key.
         Convert a triple, quad or term into a key.
 
 
         The key is the checksum of the pickled object, therefore unique for
         The key is the checksum of the pickled object, therefore unique for
-        that object. The hashing algorithm is specified in `KEY_HASH_ALGO`.
+        that object. The hashing algorithm is specified in `TERM_HASH_ALGO`.
 
 
         :param Object obj: Anything that can be reduced to terms stored in the
         :param Object obj: Anything that can be reduced to terms stored in the
         database. Pairs of terms, as well as triples and quads, are expressed
         database. Pairs of terms, as well as triples and quads, are expressed
@@ -927,7 +932,7 @@ class LmdbStore(Store):
 
 
     def _hash(self, s):
     def _hash(self, s):
         """Get the hash value of a serialized object."""
         """Get the hash value of a serialized object."""
-        return hashlib.new(self.KEY_HASH_ALGO, s).digest()
+        return hashlib.new(self.TERM_HASH_ALGO, s).digest()
 
 
 
 
     def _split_key(self, keys):
     def _split_key(self, keys):
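
The renamed ``TERM_HASH_ALGO`` drives the key derivation: per the ``_to_key`` docstring, keys are checksums of pickled terms. A sketch of just the digest step (it does not reproduce the store's actual key layout):

    import hashlib
    import pickle

    serialized = pickle.dumps(('urn:example:s', 'urn:example:p', 'urn:example:o'))
    digest = hashlib.new('sha1', serialized).digest()   # 20 bytes for SHA1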

lakesuperior/store/ldp_rs/rsrc_centric_layout.py (+90, -85)

@@ -33,7 +33,7 @@ logger = logging.getLogger(__name__)
 
 
 
 
 class RsrcCentricLayout:
 class RsrcCentricLayout:
-    '''
+    """
     This class exposes an interface to build graph store layouts. It also
     This class exposes an interface to build graph store layouts. It also
     provides the basics of the triplestore connection.
     provides the basics of the triplestore connection.
 
 
@@ -53,7 +53,7 @@ class RsrcCentricLayout:
     E.g. if the configuration indicates `simple_layout` the application will
     E.g. if the configuration indicates `simple_layout` the application will
     look for
     look for
     `lakesuperior.store.rdf.simple_layout.SimpleLayout`.
     `lakesuperior.store.rdf.simple_layout.SimpleLayout`.
-    '''
+    """
     _graph_uids = ('fcadmin', 'fcmain', 'fcstruct')
     _graph_uids = ('fcadmin', 'fcmain', 'fcstruct')
 
 
     # @TODO Move to a config file?
     # @TODO Move to a config file?
@@ -116,14 +116,14 @@ class RsrcCentricLayout:
     ## MAGIC METHODS ##
     ## MAGIC METHODS ##
 
 
     def __init__(self, config):
     def __init__(self, config):
-        '''Initialize the graph store and a layout.
+        """Initialize the graph store and a layout.
 
 
         NOTE: `rdflib.Dataset` requires a RDF 1.1 compliant store with support
         NOTE: `rdflib.Dataset` requires a RDF 1.1 compliant store with support
         for Graph Store HTTP protocol
         for Graph Store HTTP protocol
         (https://www.w3.org/TR/sparql11-http-rdf-update/). Blazegraph supports
         (https://www.w3.org/TR/sparql11-http-rdf-update/). Blazegraph supports
         this only in the (currently unreleased) 2.2 branch. It works with Jena,
         this only in the (currently unreleased) 2.2 branch. It works with Jena,
         which is currently the reference implementation.
         which is currently the reference implementation.
-        '''
+        """
         self.config = config
         self.config = config
         self.store = plugin.get('Lmdb', Store)(config['location'])
         self.store = plugin.get('Lmdb', Store)(config['location'])
         self.ds = Dataset(self.store, default_union=True)
         self.ds = Dataset(self.store, default_union=True)
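
In isolation, the store wiring shown in ``__init__`` above amounts to this sketch (the data path is illustrative; the 'Lmdb' plugin name is taken from the code shown):

    from rdflib import Dataset, plugin
    from rdflib.store import Store

    store = plugin.get('Lmdb', Store)('/var/lakesuperior/data')
    ds = Dataset(store, default_union=True)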
@@ -132,30 +132,30 @@ class RsrcCentricLayout:
 
 
     @property
     @property
     def attr_routes(self):
     def attr_routes(self):
-        '''
+        """
         This is a map that allows specific triples to go to certain graphs.
         This is a map that allows specific triples to go to certain graphs.
         It is a machine-friendly version of the static attribute `attr_map`
         It is a machine-friendly version of the static attribute `attr_map`
         which is formatted for human readability and to avoid repetition.
         which is formatted for human readability and to avoid repetition.
         The attributes not mapped here (usually user-provided triples with no
         The attributes not mapped here (usually user-provided triples with no
         special meaning to the application) go to the `fcmain:` graph.
         special meaning to the application) go to the `fcmain:` graph.
 
 
-        The output of this is a dict with a similar structure:
-
-        {
-            'p': {
-                <Predicate P1>: <destination graph G1>,
-                <Predicate P2>: <destination graph G1>,
-                <Predicate P3>: <destination graph G1>,
-                <Predicate P4>: <destination graph G2>,
-                [...]
-            },
-            't': {
-                <RDF Type T1>: <destination graph G1>,
-                <RDF Type T2>: <destination graph G3>,
-                [...]
+        The output of this is a dict with a similar structure::
+
+            {
+                'p': {
+                    <Predicate P1>: <destination graph G1>,
+                    <Predicate P2>: <destination graph G1>,
+                    <Predicate P3>: <destination graph G1>,
+                    <Predicate P4>: <destination graph G2>,
+                    [...]
+                },
+                't': {
+                    <RDF Type T1>: <destination graph G1>,
+                    <RDF Type T2>: <destination graph G3>,
+                    [...]
+                }
             }
             }
-        }
-        '''
+        """
         if not hasattr(self, '_attr_routes'):
         if not hasattr(self, '_attr_routes'):
             self._attr_routes = {'p': {}, 't': {}}
             self._attr_routes = {'p': {}, 't': {}}
             for dest in self.attr_map.keys():
             for dest in self.attr_map.keys():
@@ -168,9 +168,9 @@ class RsrcCentricLayout:
 
 
 
 
     def bootstrap(self):
     def bootstrap(self):
-        '''
+        """
         Delete all graphs and insert the basic triples.
         Delete all graphs and insert the basic triples.
-        '''
+        """
         logger.info('Deleting all data from the graph store.')
         logger.info('Deleting all data from the graph store.')
         store = self.ds.store
         store = self.ds.store
         if getattr(store, 'is_txn_open', False):
         if getattr(store, 'is_txn_open', False):
@@ -186,25 +186,25 @@ class RsrcCentricLayout:
 
 
 
 
     def get_raw(self, uri, ctx=None):
     def get_raw(self, uri, ctx=None):
-        '''
+        """
         Get a raw graph of a non-LDP resource.
         Get a raw graph of a non-LDP resource.
 
 
         The graph is queried across all contexts or within a specific one.
         The graph is queried across all contexts or within a specific one.
 
 
-        @param s(rdflib.term.URIRef) URI of the subject.
-        @param ctx (rdflib.term.URIRef) URI of the optional context. If None,
+        :param rdflib.term.URIRef s: URI of the subject.
+        :param rdflib.term.URIRef ctx: URI of the optional context. If None,
         all named graphs are queried.
         all named graphs are queried.
 
 
-        return rdflib.Graph
-        '''
+        :rtype: rdflib.Graph
+        """
         return self.store.triples((nsc['fcres'][uid], None, None), ctx)
         return self.store.triples((nsc['fcres'][uid], None, None), ctx)
 
 
 
 
     def count_rsrc(self):
     def count_rsrc(self):
-        '''
+        """
         Return a count of first-class resources, subdivided in "live" and
         Return a count of first-class resources, subdivided in "live" and
         historic snapshots.
         historic snapshots.
-        '''
+        """
         with TxnManager(self.ds.store) as txn:
         with TxnManager(self.ds.store) as txn:
             main = set(
             main = set(
                     self.ds.graph(META_GR_URI)[ : nsc['foaf'].primaryTopic : ])
                     self.ds.graph(META_GR_URI)[ : nsc['foaf'].primaryTopic : ])
@@ -215,18 +215,18 @@ class RsrcCentricLayout:
 
 
 
 
     def raw_query(self, qry_str):
     def raw_query(self, qry_str):
-        '''
+        """
         Perform a straight query to the graph store.
         Perform a straight query to the graph store.
-        '''
+        """
         return self.ds.query(qry_str)
         return self.ds.query(qry_str)
 
 
 
 
     def extract_imr(
     def extract_imr(
                 self, uid, ver_uid=None, strict=True, incl_inbound=False,
                 self, uid, ver_uid=None, strict=True, incl_inbound=False,
                 incl_children=True, embed_children=False, **kwargs):
                 incl_children=True, embed_children=False, **kwargs):
-        '''
+        """
         See base_rdf_layout.extract_imr.
         See base_rdf_layout.extract_imr.
-        '''
+        """
         if ver_uid:
         if ver_uid:
             uid = self.snapshot_uid(uid, ver_uid)
             uid = self.snapshot_uid(uid, ver_uid)
 
 
@@ -260,9 +260,9 @@ class RsrcCentricLayout:
 
 
 
 
     def ask_rsrc_exists(self, uid):
     def ask_rsrc_exists(self, uid):
-        '''
+        """
         See base_rdf_layout.ask_rsrc_exists.
         See base_rdf_layout.ask_rsrc_exists.
-        '''
+        """
         logger.debug('Checking if resource exists: {}'.format(uid))
         logger.debug('Checking if resource exists: {}'.format(uid))
         meta_gr = self.ds.graph(nsc['fcadmin'][uid])
         meta_gr = self.ds.graph(nsc['fcadmin'][uid])
         return bool(
         return bool(
@@ -270,9 +270,9 @@ class RsrcCentricLayout:
 
 
 
 
     def get_metadata(self, uid, ver_uid=None, strict=True):
     def get_metadata(self, uid, ver_uid=None, strict=True):
-        '''
+        """
         This is an optimized query to get only the administrative metadata.
         This is an optimized query to get only the administrative metadata.
-        '''
+        """
         logger.debug('Getting metadata for: {}'.format(uid))
         logger.debug('Getting metadata for: {}'.format(uid))
         if ver_uid:
         if ver_uid:
             uid = self.snapshot_uid(uid, ver_uid)
             uid = self.snapshot_uid(uid, ver_uid)
@@ -287,12 +287,12 @@ class RsrcCentricLayout:
 
 
 
 
     def get_user_data(self, uid):
     def get_user_data(self, uid):
-        '''
+        """
         Get all the user-provided data.
         Get all the user-provided data.
 
 
-        @param uid (string) Resource UID.
-        '''
-        # @TODO This only works as long as there is only one user-provided
+        :param string uid: Resource UID.
+        """
+        # *TODO* This only works as long as there is only one user-provided
         # graph. If multiple user-provided graphs will be supported, this
         # graph. If multiple user-provided graphs will be supported, this
         # should use another query to get all of them.
         # should use another query to get all of them.
         userdata_gr = self.ds.graph(nsc['fcmain'][uid])
         userdata_gr = self.ds.graph(nsc['fcmain'][uid])
@@ -301,18 +301,19 @@ class RsrcCentricLayout:
 
 
 
 
     def get_version_info(self, uid, strict=True):
     def get_version_info(self, uid, strict=True):
-        '''
+        """
         Get all metadata about a resource's versions.
         Get all metadata about a resource's versions.
-        '''
-        # @NOTE This pretty much bends the ontology—it replaces the graph URI
+        """
+        # **Note:** This pretty much bends the ontology—it replaces the graph URI
         # with the subject URI. But the concepts of data and metadata in Fedora
         # with the subject URI. But the concepts of data and metadata in Fedora
         # are quite fluid anyways...
         # are quite fluid anyways...
+
         # WIP—Is it worth to replace SPARQL here?
         # WIP—Is it worth to replace SPARQL here?
         #versions = self.ds.graph(nsc['fcadmin'][uid]).triples(
         #versions = self.ds.graph(nsc['fcadmin'][uid]).triples(
         #        (nsc['fcres'][uid], nsc['fcrepo'].hasVersion, None))
         #        (nsc['fcres'][uid], nsc['fcrepo'].hasVersion, None))
         #for version in versions:
         #for version in versions:
         #    version_meta = self.ds.graph(HIST_GRAPH_URI).triples(
         #    version_meta = self.ds.graph(HIST_GRAPH_URI).triples(
-        qry = '''
+        qry = """
         CONSTRUCT {
         CONSTRUCT {
           ?s fcrepo:hasVersion ?v .
           ?s fcrepo:hasVersion ?v .
           ?v ?p ?o .
           ?v ?p ?o .
@@ -325,13 +326,13 @@ class RsrcCentricLayout:
             ?vm  ?p ?o .
             ?vm  ?p ?o .
             FILTER (?o != ?v)
             FILTER (?o != ?v)
           }
           }
-        }'''
+        }"""
         gr = self._parse_construct(qry, init_bindings={
         gr = self._parse_construct(qry, init_bindings={
             'ag': nsc['fcadmin'][uid],
             'ag': nsc['fcadmin'][uid],
             'hg': HIST_GR_URI,
             'hg': HIST_GR_URI,
             's': nsc['fcres'][uid]})
             's': nsc['fcres'][uid]})
         rsrc = Resource(gr, nsc['fcres'][uid])
         rsrc = Resource(gr, nsc['fcres'][uid])
-        # @TODO Should return a graph.
+        # TODO Should return a graph.
         if strict:
         if strict:
             self._check_rsrc_status(rsrc)
             self._check_rsrc_status(rsrc)
 
 
@@ -339,19 +340,19 @@ class RsrcCentricLayout:
 
 
 
 
     def get_inbound_rel(self, subj_uri, full_triple=True):
     def get_inbound_rel(self, subj_uri, full_triple=True):
-        '''
+        """
         Query inbound relationships for a subject.
         Query inbound relationships for a subject.
 
 
         This can be a list of either complete triples, or of subjects referring
         This can be a list of either complete triples, or of subjects referring
         to the given URI. It excludes historic version snapshots.
         to the given URI. It excludes historic version snapshots.
 
 
-        @param subj_uri (rdflib.URIRef) Subject URI.
-        @param full_triple (boolean) Whether to return the full triples found
+        :param rdflib.URIRef subj_uri: Subject URI.
+        :param boolean full_triple: Whether to return the full triples found
         or only the subjects. By default, full triples are returned.
         or only the subjects. By default, full triples are returned.
 
 
-        @return iterator(tuple(rdflib.term.Identifier) | rdflib.URIRef)
-        Inbound triples or subjects.
-        '''
+        :rtype: Iterator(tuple(rdflib.term.Identifier) or rdflib.URIRef)
+        :return: Inbound triples or subjects.
+        """
         # Only return non-historic graphs.
         # Only return non-historic graphs.
         meta_gr = self.ds.graph(META_GR_URI)
         meta_gr = self.ds.graph(META_GR_URI)
         ptopic_uri = nsc['foaf'].primaryTopic
         ptopic_uri = nsc['foaf'].primaryTopic
@@ -364,14 +365,15 @@ class RsrcCentricLayout:
 
 
 
 
     def get_descendants(self, uid, recurse=True):
     def get_descendants(self, uid, recurse=True):
-        '''
+        """
         Get descendants (recursive children) of a resource.
         Get descendants (recursive children) of a resource.
 
 
-        @param uid (string) Resource UID.
+        :param string uid: Resource UID.
         result set.
         result set.
 
 
-        @return iterator(rdflib.URIRef) Subjects of descendant resources.
-        '''
+        :rtype: iterator(rdflib.URIRef)
+        :return: Subjects of descendant resources.
+        """
         ds = self.ds
         ds = self.ds
         subj_uri = nsc['fcres'][uid]
         subj_uri = nsc['fcres'][uid]
         ctx_uri = nsc['fcstruct'][uid]
         ctx_uri = nsc['fcstruct'][uid]
@@ -391,15 +393,15 @@ class RsrcCentricLayout:
 
 
 
 
     def patch_rsrc(self, uid, qry):
     def patch_rsrc(self, uid, qry):
-        '''
+        """
         Patch a resource with SPARQL-Update statements.
         Patch a resource with SPARQL-Update statements.
 
 
         The statement(s) is/are executed on the user-provided graph only
         The statement(s) is/are executed on the user-provided graph only
         to ensure that the scope is limited to the resource.
         to ensure that the scope is limited to the resource.
 
 
-        @param uid (string) UID of the resource to be patched.
-        @param qry (dict) Parsed and translated query, or query string.
-        '''
+        :param string uid: UID of the resource to be patched.
+        :param dict qry: Parsed and translated query, or query string.
+        """
         # Add meta graph for user-defined triples. This may not be used but
         # Add meta graph for user-defined triples. This may not be used but
         # it's simple and harmless to add here.
         # it's simple and harmless to add here.
         self.ds.graph(META_GR_URI).add(
         self.ds.graph(META_GR_URI).add(
@@ -413,12 +415,12 @@ class RsrcCentricLayout:
 
 
 
 
     def forget_rsrc(self, uid, inbound=True, children=True):
     def forget_rsrc(self, uid, inbound=True, children=True):
-        '''
+        """
         Completely delete a resource and (optionally) its children and inbound
         Completely delete a resource and (optionally) its children and inbound
         references.
         references.
 
 
         NOTE: inbound references in historic versions are not affected.
         NOTE: inbound references in historic versions are not affected.
-        '''
+        """
         # Localize variables to be used in loops.
         # Localize variables to be used in loops.
         uri = nsc['fcres'][uid]
         uri = nsc['fcres'][uid]
         topic_uri = nsc['foaf'].primaryTopic
         topic_uri = nsc['foaf'].primaryTopic
@@ -447,23 +449,23 @@ class RsrcCentricLayout:
 
 
 
 
     def truncate_rsrc(self, uid):
     def truncate_rsrc(self, uid):
-        '''
+        """
         Remove all user-provided data from a resource and only leave admin and
         Remove all user-provided data from a resource and only leave admin and
         structure data.
         structure data.
-        '''
+        """
         userdata = set(self.get_user_data(uid))
         userdata = set(self.get_user_data(uid))
 
 
         return self.modify_rsrc(uid, remove_trp=userdata)
         return self.modify_rsrc(uid, remove_trp=userdata)
 
 
 
 
     def modify_rsrc(self, uid, remove_trp=set(), add_trp=set()):
     def modify_rsrc(self, uid, remove_trp=set(), add_trp=set()):
-        '''
+        """
         Modify triples about a subject.
         Modify triples about a subject.
 
 
         This method adds and removes triple sets from specific graphs,
         This method adds and removes triple sets from specific graphs,
         indicated by the term router. It also adds metadata about the changed
         indicated by the term router. It also adds metadata about the changed
         graphs.
         graphs.
-        '''
+        """
         remove_routes = defaultdict(set)
         remove_routes = defaultdict(set)
         add_routes = defaultdict(set)
         add_routes = defaultdict(set)
         historic = VERS_CONT_LABEL in uid
         historic = VERS_CONT_LABEL in uid
@@ -502,7 +504,7 @@ class RsrcCentricLayout:
                 ver_uid = uid.split(VERS_CONT_LABEL)[1].lstrip('/')
                 ver_uid = uid.split(VERS_CONT_LABEL)[1].lstrip('/')
                 meta_gr.set((
                 meta_gr.set((
                     gr_uri, nsc['fcrepo'].hasVersionLabel, Literal(ver_uid)))
                     gr_uri, nsc['fcrepo'].hasVersionLabel, Literal(ver_uid)))
-            # @TODO More provenance metadata can be added here.
+            # *TODO* More provenance metadata can be added here.
 
 
         # Add graph RDF types.
         # Add graph RDF types.
         for gr_uri, gr_type in graph_types:
         for gr_uri, gr_type in graph_types:
@@ -510,12 +512,12 @@ class RsrcCentricLayout:
 
 
 
 
     def delete_rsrc(self, uid, historic=False):
     def delete_rsrc(self, uid, historic=False):
-        '''
+        """
         Delete all aspect graphs of an individual resource.
         Delete all aspect graphs of an individual resource.
 
 
-        @param uid Resource UID.
-        @param historic (bool) Whether the UID is of a historic version.
-        '''
+        :param uid: Resource UID.
+        :param bool historic: Whether the UID is of a historic version.
+        """
         meta_gr_uri = HIST_GR_URI if historic else META_GR_URI
         meta_gr_uri = HIST_GR_URI if historic else META_GR_URI
         for gr_uri in self.ds.graph(meta_gr_uri)[
         for gr_uri in self.ds.graph(meta_gr_uri)[
                 : nsc['foaf'].primaryTopic : nsc['fcres'][uid]]:
                 : nsc['foaf'].primaryTopic : nsc['fcres'][uid]]:
@@ -524,9 +526,9 @@ class RsrcCentricLayout:
 
 
 
 
     def snapshot_uid(self, uid, ver_uid):
     def snapshot_uid(self, uid, ver_uid):
-        '''
+        """
         Create a versioned UID string from a main UID and a version UID.
         Create a versioned UID string from a main UID and a version UID.
-        '''
+        """
         if VERS_CONT_LABEL in uid:
         if VERS_CONT_LABEL in uid:
             raise InvalidResourceError(uid,
             raise InvalidResourceError(uid,
                     'Resource \'{}\' is already a version.')
                     'Resource \'{}\' is already a version.')
@@ -535,9 +537,9 @@ class RsrcCentricLayout:
 
 
 
 
     def uri_to_uid(self, uri):
     def uri_to_uid(self, uri):
-        '''
+        """
         Convert an internal URI to a UID.
         Convert an internal URI to a UID.
-        '''
+        """
         return str(uri).replace(nsc['fcres'], '')
         return str(uri).replace(nsc['fcres'], '')
 
 
 
 
@@ -566,9 +568,9 @@ class RsrcCentricLayout:
     ## PROTECTED MEMBERS ##
     ## PROTECTED MEMBERS ##
 
 
     def _check_rsrc_status(self, rsrc):
     def _check_rsrc_status(self, rsrc):
-        '''
+        """
         Check if a resource is not existing or if it is a tombstone.
         Check if a resource is not existing or if it is a tombstone.
-        '''
+        """
         uid = self.uri_to_uid(rsrc.identifier)
         uid = self.uri_to_uid(rsrc.identifier)
         if not len(rsrc.graph):
         if not len(rsrc.graph):
             raise ResourceNotExistsError(uid)
             raise ResourceNotExistsError(uid)
@@ -585,9 +587,11 @@ class RsrcCentricLayout:
 
 
 
 
     def _parse_construct(self, qry, init_bindings={}):
     def _parse_construct(self, qry, init_bindings={}):
-        '''
-        Parse a CONSTRUCT query and return a Graph.
-        '''
+        """
+        Parse a CONSTRUCT query.
+
+        :rtype: rdflib.Graph
+        """
         try:
         try:
             qres = self.ds.query(qry, initBindings=init_bindings)
             qres = self.ds.query(qry, initBindings=init_bindings)
         except ResultException:
         except ResultException:
@@ -598,11 +602,12 @@ class RsrcCentricLayout:
 
 
 
 
     def _map_graph_uri(self, t, uid):
     def _map_graph_uri(self, t, uid):
-        '''
+        """
         Map a triple to a namespace prefix corresponding to a graph.
         Map a triple to a namespace prefix corresponding to a graph.
 
 
-        @return Tuple with a graph URI and an associated RDF type.
-        '''
+        :rtype: tuple
+        :return: 2-tuple with a graph URI and an associated RDF type.
+        """
         if t[1] in self.attr_routes['p'].keys():
         if t[1] in self.attr_routes['p'].keys():
             pfx = self.attr_routes['p'][t[1]]
             pfx = self.attr_routes['p'][t[1]]
         elif t[1] == RDF.type and t[2] in self.attr_routes['t'].keys():
         elif t[1] == RDF.type and t[2] in self.attr_routes['t'].keys():

lakesuperior/toolbox.py (+32, -27)

@@ -24,11 +24,11 @@ class Toolbox:
         '''
         '''
         Replace the domain of a term.
         Replace the domain of a term.
 
 
-        @param term (URIRef) The term (URI) to change.
-        @param search (string) Domain string to replace.
-        @param replace (string) Domain string to use for replacement.
+        :param rdflib.URIRef term: The term (URI) to change.
+        :param str search: Domain string to replace.
+        :param str replace: Domain string to use for replacement.
 
 
-        @return URIRef
+        :rtype: rdflib.URIRef
         '''
         '''
         s = str(term)
         s = str(term)
         if s.startswith(search):
         if s.startswith(search):
@@ -40,7 +40,7 @@ class Toolbox:
     def uid_to_uri(self, uid):
     def uid_to_uri(self, uid):
         '''Convert a UID to a URI.
         '''Convert a UID to a URI.
 
 
-        @return URIRef
+        :rtype: rdflib.URIRef
         '''
         '''
         return URIRef(g.webroot + uid)
         return URIRef(g.webroot + uid)
 
 
@@ -48,7 +48,7 @@ class Toolbox:
     def uri_to_uid(self, uri):
     def uri_to_uid(self, uri):
         '''Convert an absolute URI (internal or external) to a UID.
         '''Convert an absolute URI (internal or external) to a UID.
 
 
-        @return string
+        :rtype: str
         '''
         '''
         if uri.startswith(nsc['fcres']):
         if uri.startswith(nsc['fcres']):
             return str(uri).replace(nsc['fcres'], '')
             return str(uri).replace(nsc['fcres'], '')
@@ -59,9 +59,9 @@ class Toolbox:
     def localize_uri_string(self, s):
     def localize_uri_string(self, s):
         '''Convert URIs into URNs in a string using the application base URI.
         '''Convert URIs into URNs in a string using the application base URI.
 
 
-        @param string s Input string.
+        :param str: s Input string.
 
 
-        @return string
+        :rtype: str
         '''
         '''
         if s.strip('/') == g.webroot:
         if s.strip('/') == g.webroot:
             return str(ROOT_RSRC_URI)
             return str(ROOT_RSRC_URI)
@@ -74,9 +74,9 @@ class Toolbox:
         '''
         '''
         Localize an individual term.
         Localize an individual term.
 
 
-        @param rdflib.term.URIRef urn Input URI.
+        :param rdflib.URIRef: urn Input URI.
 
 
-        @return rdflib.term.URIRef
+        :rtype: rdflib.URIRef
         '''
         '''
         return URIRef(self.localize_uri_string(str(uri)))
         return URIRef(self.localize_uri_string(str(uri)))
 
 
@@ -85,9 +85,9 @@ class Toolbox:
         '''
         '''
         Localize terms in a triple.
         Localize terms in a triple.
 
 
-        @param trp (tuple(rdflib.term.URIRef)) The triple to be converted
+        :param tuple(rdflib.URIRef) trp: The triple to be converted
 
 
-        @return tuple(rdflib.term.URIRef)
+        :rtype: tuple(rdflib.URIRef)
         '''
         '''
         s, p, o = trp
         s, p, o = trp
         if s.startswith(g.webroot):
         if s.startswith(g.webroot):
@@ -114,9 +114,9 @@ class Toolbox:
         '''
         '''
         Localize an RDF stream with domain-specific URIs.
         Localize an RDF stream with domain-specific URIs.
 
 
-        @param data (bytes) Binary RDF data.
+        :param bytes data: Binary RDF data.
 
 
-        @return bytes
+        :rtype: bytes
         '''
         '''
         return data.replace(
         return data.replace(
             (g.webroot + '/').encode('utf-8'),
             (g.webroot + '/').encode('utf-8'),
@@ -159,9 +159,9 @@ class Toolbox:
     def globalize_string(self, s):
     def globalize_string(self, s):
         '''Convert URNs into URIs in a string using the application base URI.
         '''Convert URNs into URIs in a string using the application base URI.
 
 
-        @param string s Input string.
+        :param string s: Input string.
 
 
-        @return string
+        :rtype: string
         '''
         '''
         return s.replace(str(nsc['fcres']), g.webroot)
         return s.replace(str(nsc['fcres']), g.webroot)
 
 
@@ -170,9 +170,9 @@ class Toolbox:
         '''
         '''
         Convert an URN into an URI using the application base URI.
         Convert an URN into an URI using the application base URI.
 
 
-        @param rdflib.term.URIRef urn Input URN.
+        :param rdflib.URIRef urn: Input URN.
 
 
-        @return rdflib.term.URIRef
+        :rtype: rdflib.URIRef
         '''
         '''
         return URIRef(self.globalize_string(str(urn)))
         return URIRef(self.globalize_string(str(urn)))
 
 
@@ -181,9 +181,9 @@ class Toolbox:
         '''
         '''
         Globalize terms in a triple.
         Globalize terms in a triple.
 
 
-        @param trp (tuple(rdflib.term.URIRef)) The triple to be converted
+        :param tuple(rdflib.URIRef) trp: The triple to be converted
 
 
-        @return tuple(rdflib.term.URIRef)
+        :rtype: tuple(rdflib.URIRef)
         '''
         '''
         s, p, o = trp
         s, p, o = trp
         if s.startswith(nsc['fcres']):
         if s.startswith(nsc['fcres']):
@@ -221,13 +221,13 @@ class Toolbox:
 
 
     def parse_rfc7240(self, h_str):
     def parse_rfc7240(self, h_str):
         '''
         '''
-        Parse `Prefer` header as per https://tools.ietf.org/html/rfc7240
+        Parse ``Prefer`` header as per https://tools.ietf.org/html/rfc7240
 
 
-        The `cgi.parse_header` standard method does not work with all possible
-        use cases for this header.
+        The ``cgi.parse_header`` standard method does not work with all
+        possible use cases for this header.
 
 
-        @param h_str (string) The header(s) as a comma-separated list of Prefer
-        statements, excluding the `Prefer: ` token.
+        :param str h_str: The header(s) as a comma-separated list of Prefer
+            statements, excluding the ``Prefer:`` token.
         '''
         '''
         parsed_hdr = defaultdict(dict)
         parsed_hdr = defaultdict(dict)
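
A hypothetical call following the docstring above (it assumes ``Toolbox`` takes no constructor arguments; the preference string is a standard RFC 7240 example, and the exact shape of the returned ``defaultdict`` is not shown in this diff):

    from lakesuperior.toolbox import Toolbox

    prefs = Toolbox().parse_rfc7240('return=representation, handling=lenient')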
 
 
@@ -267,9 +267,10 @@ class Toolbox:
 
 
         @TODO This can be later reworked to use a custom hashing algorithm.
         @TODO This can be later reworked to use a custom hashing algorithm.
 
 
-        @param rdflib.Graph gr The graph to be hashed.
+        :param rdflib.Graph: gr The graph to be hashed.
 
 
-        @return string SHA1 checksum.
+        :rtype: str
+        :return: SHA1 checksum.
         '''
         '''
         # Remove the messageDigest property, which very likely reflects the
         # Remove the messageDigest property, which very likely reflects the
         # previous state of the resource.
         # previous state of the resource.
@@ -283,6 +284,10 @@ class Toolbox:
     def split_uuid(self, uuid):
     def split_uuid(self, uuid):
         '''
         '''
         Split a UID into pairtree segments. This mimics FCREPO4 behavior.
         Split a UID into pairtree segments. This mimics FCREPO4 behavior.
+
+        :param str uuid: UUID to split.
+
+        :rtype: str
         '''
         '''
         path = '{}/{}/{}/{}/{}'.format(uuid[:2], uuid[2:4],
         path = '{}/{}/{}/{}/{}'.format(uuid[:2], uuid[2:4],
                 uuid[4:6], uuid[6:8], uuid)
                 uuid[4:6], uuid[6:8], uuid)
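
Given the format string shown above, a worked example of the pairtree split (the UID value is illustrative):

    Toolbox().split_uuid('0123456789abcdef')
    # -> '01/23/45/67/0123456789abcdef'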