Browse Source

Docstring conversion #3.

Stefano Cossu 7 years ago
parent
commit
a813ac161f

+ 8 - 8
lakesuperior/endpoints/admin.py

@@ -13,21 +13,21 @@ admin = Blueprint('admin', __name__)
 
 
 @admin.route('/stats', methods=['GET'])
 @admin.route('/stats', methods=['GET'])
 def stats():
 def stats():
-    '''
+    """
     Get repository statistics.
     Get repository statistics.
-    '''
+    """
     def fsize_fmt(num, suffix='b'):
     def fsize_fmt(num, suffix='b'):
-        '''
+        """
         Format an integer into 1024-block file size format.
         Format an integer into 1024-block file size format.
 
 
         Adapted from Python 2 code on
         Adapted from Python 2 code on
         https://stackoverflow.com/a/1094933/3758232
         https://stackoverflow.com/a/1094933/3758232
 
 
-        @param num (int) Size value in bytes.
-        @param suffix (string) Suffix label (defaults to `B`).
+        :param int num: Size value in bytes.
+        :param str suffix: Suffix label (defaults to `b`).
 
 
         @return string Formatted size to largest fitting unit.
         @return string Formatted size to largest fitting unit.
-        '''
+        """
         for unit in ['','K','M','G','T','P','E','Z']:
         for unit in ['','K','M','G','T','P','E','Z']:
             if abs(num) < 1024.0:
             if abs(num) < 1024.0:
                 return "{:3.1f} {}{}".format(num, unit, suffix)
                 return "{:3.1f} {}{}".format(num, unit, suffix)
@@ -42,9 +42,9 @@ def stats():
 
 
 @admin.route('/tools', methods=['GET'])
 @admin.route('/tools', methods=['GET'])
 def admin_tools():
 def admin_tools():
-    '''
+    """
     Admin tools.
     Admin tools.
 
 
     @TODO stub.
     @TODO stub.
-    '''
+    """
     return render_template('admin_tools.html')
     return render_template('admin_tools.html')

+ 41 - 41
lakesuperior/endpoints/ldp.py

@@ -62,7 +62,7 @@ std_headers = {
     #'Allow' : ','.join(allow),
     #'Allow' : ','.join(allow),
 }
 }
 
 
-'''Predicates excluded by view.'''
+"""Predicates excluded by view."""
 vw_blacklist = {
 vw_blacklist = {
 }
 }
 
 
@@ -112,17 +112,17 @@ def log_request_end(rsp):
 @ldp.route('/<path:uid>/fcr:content', defaults={'out_fmt' : 'non_rdf'},
 @ldp.route('/<path:uid>/fcr:content', defaults={'out_fmt' : 'non_rdf'},
         methods=['GET'])
         methods=['GET'])
 def get_resource(uid, out_fmt=None):
 def get_resource(uid, out_fmt=None):
-    '''
+    """
     https://www.w3.org/TR/ldp/#ldpr-HTTP_GET
     https://www.w3.org/TR/ldp/#ldpr-HTTP_GET
 
 
     Retrieve RDF or binary content.
     Retrieve RDF or binary content.
 
 
-    @param uid (string) UID of resource to retrieve. The repository root has
+    :param str uid: UID of resource to retrieve. The repository root has
     an empty string for UID.
     an empty string for UID.
-    @param out_fmt (string) Force output to RDF or non-RDF if the resource is
+    :param str out_fmt: Force output to RDF or non-RDF if the resource is
     a LDP-NR. This is not available in the API but is used e.g. by the
     a LDP-NR. This is not available in the API but is used e.g. by the
     `*/fcr:metadata` and `*/fcr:content` endpoints. The default is False.
     `*/fcr:metadata` and `*/fcr:content` endpoints. The default is False.
-    '''
+    """
     logger.info('UID: {}'.format(uid))
     logger.info('UID: {}'.format(uid))
     out_headers = std_headers
     out_headers = std_headers
     repr_options = defaultdict(dict)
     repr_options = defaultdict(dict)
@@ -169,9 +169,9 @@ def get_resource(uid, out_fmt=None):
 
 
 @ldp.route('/<path:uid>/fcr:versions', methods=['GET'])
 @ldp.route('/<path:uid>/fcr:versions', methods=['GET'])
 def get_version_info(uid):
 def get_version_info(uid):
-    '''
+    """
     Get version info (`fcr:versions`).
     Get version info (`fcr:versions`).
-    '''
+    """
     try:
     try:
         gr = rsrc_api.get_version_info(uid)
         gr = rsrc_api.get_version_info(uid)
     except ResourceNotExistsError as e:
     except ResourceNotExistsError as e:
@@ -186,12 +186,12 @@ def get_version_info(uid):
 
 
 @ldp.route('/<path:uid>/fcr:versions/<ver_uid>', methods=['GET'])
 @ldp.route('/<path:uid>/fcr:versions/<ver_uid>', methods=['GET'])
 def get_version(uid, ver_uid):
 def get_version(uid, ver_uid):
-    '''
+    """
     Get an individual resource version.
     Get an individual resource version.
 
 
-    @param uid (string) Resource UID.
-    @param ver_uid (string) Version UID.
-    '''
+    :param str uid: Resource UID.
+    :param str ver_uid: Version UID.
+    """
     try:
     try:
         gr = rsrc_api.get_version(uid, ver_uid)
         gr = rsrc_api.get_version(uid, ver_uid)
     except ResourceNotExistsError as e:
     except ResourceNotExistsError as e:
@@ -208,11 +208,11 @@ def get_version(uid, ver_uid):
 @ldp.route('/', defaults={'parent_uid': '/'}, methods=['POST'],
 @ldp.route('/', defaults={'parent_uid': '/'}, methods=['POST'],
         strict_slashes=False)
         strict_slashes=False)
 def post_resource(parent_uid):
 def post_resource(parent_uid):
-    '''
+    """
     https://www.w3.org/TR/ldp/#ldpr-HTTP_POST
     https://www.w3.org/TR/ldp/#ldpr-HTTP_POST
 
 
     Add a new resource in a new URI.
     Add a new resource in a new URI.
-    '''
+    """
     out_headers = std_headers
     out_headers = std_headers
     try:
     try:
         slug = request.headers['Slug']
         slug = request.headers['Slug']
@@ -261,11 +261,11 @@ def post_resource(parent_uid):
 @ldp.route('/<path:uid>/fcr:metadata', defaults={'force_rdf' : True},
 @ldp.route('/<path:uid>/fcr:metadata', defaults={'force_rdf' : True},
         methods=['PUT'])
         methods=['PUT'])
 def put_resource(uid):
 def put_resource(uid):
-    '''
+    """
     https://www.w3.org/TR/ldp/#ldpr-HTTP_PUT
     https://www.w3.org/TR/ldp/#ldpr-HTTP_PUT
 
 
     Add or replace a new resource at a specified URI.
     Add or replace a new resource at a specified URI.
-    '''
+    """
     # Parse headers.
     # Parse headers.
     logger.debug('Request headers: {}'.format(request.headers))
     logger.debug('Request headers: {}'.format(request.headers))
 
 
@@ -310,11 +310,11 @@ def put_resource(uid):
 
 
 @ldp.route('/<path:uid>', methods=['PATCH'], strict_slashes=False)
 @ldp.route('/<path:uid>', methods=['PATCH'], strict_slashes=False)
 def patch_resource(uid, is_metadata=False):
 def patch_resource(uid, is_metadata=False):
-    '''
+    """
     https://www.w3.org/TR/ldp/#ldpr-HTTP_PATCH
     https://www.w3.org/TR/ldp/#ldpr-HTTP_PATCH
 
 
     Update an existing resource with a SPARQL-UPDATE payload.
     Update an existing resource with a SPARQL-UPDATE payload.
-    '''
+    """
     rsp_headers = {'Content-Type' : 'text/plain; charset=utf-8'}
     rsp_headers = {'Content-Type' : 'text/plain; charset=utf-8'}
     if request.mimetype != 'application/sparql-update':
     if request.mimetype != 'application/sparql-update':
         return 'Provided content type is not a valid parsable format: {}'\
         return 'Provided content type is not a valid parsable format: {}'\
@@ -344,7 +344,7 @@ def patch_resource_metadata(uid):
 
 
 @ldp.route('/<path:uid>', methods=['DELETE'])
 @ldp.route('/<path:uid>', methods=['DELETE'])
 def delete_resource(uid):
 def delete_resource(uid):
-    '''
+    """
     Delete a resource and optionally leave a tombstone.
     Delete a resource and optionally leave a tombstone.
 
 
     This behaves differently from FCREPO. A tombstone indicated that the
     This behaves differently from FCREPO. A tombstone indicated that the
@@ -355,7 +355,7 @@ def delete_resource(uid):
     In order to completely wipe out all traces of a resource, the tombstone
     In order to completely wipe out all traces of a resource, the tombstone
     must be deleted as well, or the `Prefer:no-tombstone` header can be used.
     must be deleted as well, or the `Prefer:no-tombstone` header can be used.
     The latter will forget (completely delete) the resource immediately.
     The latter will forget (completely delete) the resource immediately.
-    '''
+    """
     headers = std_headers
     headers = std_headers
 
 
     if 'prefer' in request.headers:
     if 'prefer' in request.headers:
@@ -377,12 +377,12 @@ def delete_resource(uid):
 @ldp.route('/<path:uid>/fcr:tombstone', methods=['GET', 'POST', 'PUT',
 @ldp.route('/<path:uid>/fcr:tombstone', methods=['GET', 'POST', 'PUT',
         'PATCH', 'DELETE'])
         'PATCH', 'DELETE'])
 def tombstone(uid):
 def tombstone(uid):
-    '''
+    """
     Handle all tombstone operations.
     Handle all tombstone operations.
 
 
     The only allowed methods are POST and DELETE; any other verb will return a
     The only allowed methods are POST and DELETE; any other verb will return a
     405.
     405.
-    '''
+    """
     try:
     try:
         rsrc = rsrc_api.get(uid)
         rsrc = rsrc_api.get(uid)
     except TombstoneError as e:
     except TombstoneError as e:
@@ -409,9 +409,9 @@ def tombstone(uid):
 
 
 @ldp.route('/<path:uid>/fcr:versions', methods=['POST', 'PUT'])
 @ldp.route('/<path:uid>/fcr:versions', methods=['POST', 'PUT'])
 def post_version(uid):
 def post_version(uid):
-    '''
+    """
     Create a new resource version.
     Create a new resource version.
-    '''
+    """
     if request.method == 'PUT':
     if request.method == 'PUT':
         return 'Method not allowed.', 405
         return 'Method not allowed.', 405
     ver_uid = request.headers.get('slug', None)
     ver_uid = request.headers.get('slug', None)
@@ -430,14 +430,14 @@ def post_version(uid):
 
 
 @ldp.route('/<path:uid>/fcr:versions/<ver_uid>', methods=['PATCH'])
 @ldp.route('/<path:uid>/fcr:versions/<ver_uid>', methods=['PATCH'])
 def patch_version(uid, ver_uid):
 def patch_version(uid, ver_uid):
-    '''
+    """
     Revert to a previous version.
     Revert to a previous version.
 
 
     NOTE: This creates a new version snapshot.
     NOTE: This creates a new version snapshot.
 
 
-    @param uid (string) Resource UID.
-    @param ver_uid (string) Version UID.
-    '''
+    :param str uid: Resource UID.
+    :param str ver_uid: Version UID.
+    """
     try:
     try:
         LdpFactory.from_stored(uid).revert_to_version(ver_uid)
         LdpFactory.from_stored(uid).revert_to_version(ver_uid)
     except ResourceNotExistsError as e:
     except ResourceNotExistsError as e:
@@ -453,9 +453,9 @@ def patch_version(uid, ver_uid):
 ## PRIVATE METHODS ##
 ## PRIVATE METHODS ##
 
 
 def _negotiate_content(gr, headers=None, **vw_kwargs):
 def _negotiate_content(gr, headers=None, **vw_kwargs):
-    '''
+    """
     Return HTML or serialized RDF depending on accept headers.
     Return HTML or serialized RDF depending on accept headers.
-    '''
+    """
     if request.accept_mimetypes.best == 'text/html':
     if request.accept_mimetypes.best == 'text/html':
         return render_template(
         return render_template(
                 'resource.html', gr=gr, nsc=nsc, nsm=nsm,
                 'resource.html', gr=gr, nsc=nsc, nsm=nsm,
@@ -467,9 +467,9 @@ def _negotiate_content(gr, headers=None, **vw_kwargs):
 
 
 
 
 def _bistream_from_req():
 def _bistream_from_req():
-    '''
+    """
     Find how a binary file and its MIMEtype were uploaded in the request.
     Find how a binary file and its MIMEtype were uploaded in the request.
-    '''
+    """
     #logger.debug('Content type: {}'.format(request.mimetype))
     #logger.debug('Content type: {}'.format(request.mimetype))
     #logger.debug('files: {}'.format(request.files))
     #logger.debug('files: {}'.format(request.files))
     #logger.debug('stream: {}'.format(request.stream))
     #logger.debug('stream: {}'.format(request.stream))
@@ -508,9 +508,9 @@ def _tombstone_response(e, uid):
 
 
 
 
 def set_post_put_params():
 def set_post_put_params():
-    '''
+    """
     Sets handling and content disposition for POST and PUT by parsing headers.
     Sets handling and content disposition for POST and PUT by parsing headers.
-    '''
+    """
     handling = 'strict'
     handling = 'strict'
     if 'prefer' in request.headers:
     if 'prefer' in request.headers:
         prefer = g.tbox.parse_rfc7240(request.headers['prefer'])
         prefer = g.tbox.parse_rfc7240(request.headers['prefer'])
@@ -528,10 +528,10 @@ def set_post_put_params():
 
 
 
 
 def is_accept_hdr_rdf_parsable():
 def is_accept_hdr_rdf_parsable():
-    '''
+    """
     Check if any of the 'Accept' header values provided is a RDF parsable
     Check if any of the 'Accept' header values provided is a RDF parsable
     format.
     format.
-    '''
+    """
     for mimetype in request.accept_mimetypes.values():
     for mimetype in request.accept_mimetypes.values():
         if LdpFactory.is_rdf_parsable(mimetype):
         if LdpFactory.is_rdf_parsable(mimetype):
             return True
             return True
@@ -539,14 +539,14 @@ def is_accept_hdr_rdf_parsable():
 
 
 
 
 def parse_repr_options(retr_opts):
 def parse_repr_options(retr_opts):
-    '''
+    """
     Set options to retrieve IMR.
     Set options to retrieve IMR.
 
 
     Ideally, IMR retrieval is done once per request, so all the options
     Ideally, IMR retrieval is done once per request, so all the options
     are set once in the `imr()` property.
     are set once in the `imr()` property.
 
 
-    @param retr_opts (dict): Options parsed from `Prefer` header.
-    '''
+    :param dict retr_opts: Options parsed from `Prefer` header.
+    """
     logger.debug('Parsing retrieval options: {}'.format(retr_opts))
     logger.debug('Parsing retrieval options: {}'.format(retr_opts))
     imr_options = {}
     imr_options = {}
 
 
@@ -591,12 +591,12 @@ def parse_repr_options(retr_opts):
 
 
 
 
 def _headers_from_metadata(rsrc):
 def _headers_from_metadata(rsrc):
-    '''
+    """
     Create a dict of headers from a metadata graph.
     Create a dict of headers from a metadata graph.
 
 
-    @param rsrc (lakesuperior.model.ldpr.Ldpr) Resource to extract metadata
+    :param lakesuperior.model.ldpr.Ldpr rsrc: Resource to extract metadata
     from.
     from.
-    '''
+    """
     out_headers = defaultdict(list)
     out_headers = defaultdict(list)
 
 
     digest = rsrc.metadata.value(nsc['premis'].hasMessageDigest)
     digest = rsrc.metadata.value(nsc['premis'].hasMessageDigest)

+ 2 - 6
lakesuperior/endpoints/main.py

@@ -13,17 +13,13 @@ main = Blueprint('main', __name__, template_folder='templates',
 
 
 @main.route('/', methods=['GET'])
 @main.route('/', methods=['GET'])
 def index():
 def index():
-    '''
-    Homepage.
-    '''
+    """Homepage."""
     return render_template('index.html')
     return render_template('index.html')
 
 
 
 
 @main.route('/debug', methods=['GET'])
 @main.route('/debug', methods=['GET'])
 def debug():
 def debug():
-    '''
-    Debug page.
-    '''
+    """Debug page."""
     raise RuntimeError()
     raise RuntimeError()
 
 
 
 

+ 5 - 5
lakesuperior/endpoints/query.py

@@ -20,9 +20,9 @@ query = Blueprint('query', __name__)
 
 
 @query.route('/term_search', methods=['GET'])
 @query.route('/term_search', methods=['GET'])
 def term_search():
 def term_search():
-    '''
+    """
     Search by entering a search term and optional property and comparison term.
     Search by entering a search term and optional property and comparison term.
-    '''
+    """
     valid_operands = (
     valid_operands = (
         ('=', 'Equals'),
         ('=', 'Equals'),
         ('>', 'Greater Than'),
         ('>', 'Greater Than'),
@@ -40,11 +40,11 @@ def term_search():
 
 
 @query.route('/sparql', methods=['GET', 'POST'])
 @query.route('/sparql', methods=['GET', 'POST'])
 def sparql():
 def sparql():
-    '''
+    """
     Perform a direct SPARQL query on the underlying triplestore.
     Perform a direct SPARQL query on the underlying triplestore.
 
 
-    @param qry SPARQL query string.
-    '''
+    :param str qry: SPARQL query string.
+    """
     accept_mimetypes = {
     accept_mimetypes = {
         'text/csv': 'csv',
         'text/csv': 'csv',
         'application/sparql-results+json': 'json',
         'application/sparql-results+json': 'json',

+ 14 - 18
lakesuperior/messaging/formatters.py

@@ -8,12 +8,12 @@ from lakesuperior.globals import RES_CREATED, RES_DELETED, RES_UPDATED
 
 
 
 
 class BaseASFormatter(metaclass=ABCMeta):
 class BaseASFormatter(metaclass=ABCMeta):
-    '''
+    """
     Format message as ActivityStreams.
     Format message as ActivityStreams.
 
 
     This is not really a `logging.Formatter` subclass, but a plain string
     This is not really a `logging.Formatter` subclass, but a plain string
     builder.
     builder.
-    '''
+    """
     ev_types = {
     ev_types = {
         RES_CREATED : 'Create',
         RES_CREATED : 'Create',
         RES_DELETED : 'Delete',
         RES_DELETED : 'Delete',
@@ -28,7 +28,7 @@ class BaseASFormatter(metaclass=ABCMeta):
 
 
     def __init__(
     def __init__(
             self, rsrc_uri, ev_type, timestamp, rsrc_type, actor, data=None):
             self, rsrc_uri, ev_type, timestamp, rsrc_type, actor, data=None):
-        '''
+        """
         Format output according to granularity level.
         Format output according to granularity level.
 
 
         NOTE: Granularity level does not refer to the logging levels, i.e.
         NOTE: Granularity level does not refer to the logging levels, i.e.
@@ -36,14 +36,14 @@ class BaseASFormatter(metaclass=ABCMeta):
         are logged under the same level. This it is rather about *what* gets
         are logged under the same level. This it is rather about *what* gets
         logged in a message.
         logged in a message.
 
 
-        @param rsrc_uri (rdflib.URIRef) URI of the resource.
-        @param ev_type (string) one of `create`, `delete` or `update`
-        @param timestamp (string) Timestamp of the event.
-        @param data (tuple(set)) if messaging is configured with `provenance`
+        :param rdflib.URIRef rsrc_uri: URI of the resource.
+        :param str ev_type: one of `create`, `delete` or `update`
+        :param str timestamp: Timestamp of the event.
+        :param tuple(set) data: if messaging is configured with `provenance`
         level, this is a 2-tuple with one set (as 3-tuples of
         level, this is a 2-tuple with one set (as 3-tuples of
         RDFlib.Identifier instances) for removed triples, and one set for
         RDFlib.Identifier instances) for removed triples, and one set for
         added triples.
         added triples.
-        '''
+        """
         self.rsrc_uri = rsrc_uri
         self.rsrc_uri = rsrc_uri
         self.ev_type = ev_type
         self.ev_type = ev_type
         self.timestamp = timestamp
         self.timestamp = timestamp
@@ -59,15 +59,13 @@ class BaseASFormatter(metaclass=ABCMeta):
 
 
 
 
 class ASResourceFormatter(BaseASFormatter):
 class ASResourceFormatter(BaseASFormatter):
-    '''
+    """
     Sends information about a resource being created, updated or deleted, by
     Sends information about a resource being created, updated or deleted, by
     who and when, with no further information about what changed.
     who and when, with no further information about what changed.
-    '''
+    """
 
 
     def __str__(self):
     def __str__(self):
-        '''
-        Output structured data as string.
-        '''
+        """Output structured data as string."""
         ret = {
         ret = {
             '@context': 'https://www.w3.org/ns/activitystreams',
             '@context': 'https://www.w3.org/ns/activitystreams',
             'id' : 'urn:uuid:{}'.format(uuid.uuid4()),
             'id' : 'urn:uuid:{}'.format(uuid.uuid4()),
@@ -86,15 +84,13 @@ class ASResourceFormatter(BaseASFormatter):
 
 
 
 
 class ASDeltaFormatter(BaseASFormatter):
 class ASDeltaFormatter(BaseASFormatter):
-    '''
+    """
     Sends the same information as `ASResourceFormatter` with the addition of
     Sends the same information as `ASResourceFormatter` with the addition of
     the triples that were added and the ones that were removed in the request.
     the triples that were added and the ones that were removed in the request.
     This may be used to send rich provenance data to a preservation system.
     This may be used to send rich provenance data to a preservation system.
-    '''
+    """
     def __str__(self):
     def __str__(self):
-        '''
-        Output structured data as string.
-        '''
+        """Output structured data as string."""
         ret = {
         ret = {
             '@context': 'https://www.w3.org/ns/activitystreams',
             '@context': 'https://www.w3.org/ns/activitystreams',
             'id' : 'urn:uuid:{}'.format(uuid.uuid4()),
             'id' : 'urn:uuid:{}'.format(uuid.uuid4()),

+ 4 - 8
lakesuperior/messaging/handlers.py

@@ -4,13 +4,13 @@ import stomp
 
 
 
 
 class StompHandler(logging.Handler):
 class StompHandler(logging.Handler):
-    '''
+    """
     Send messages to a remote queue broker using the STOMP protocol.
     Send messages to a remote queue broker using the STOMP protocol.
 
 
     This module is named and configured separately from
     This module is named and configured separately from
     standard logging for clarity about its scope: while logging has an
     standard logging for clarity about its scope: while logging has an
     informational purpose, this module has a functional one.
     informational purpose, this module has a functional one.
-    '''
+    """
     def __init__(self, conf):
     def __init__(self, conf):
         self.conf = conf
         self.conf = conf
         if self.conf['protocol'] == '11':
         if self.conf['protocol'] == '11':
@@ -32,15 +32,11 @@ class StompHandler(logging.Handler):
 
 
 
 
     def __del_(self):
     def __del_(self):
-        '''
-        Disconnect the client.
-        '''
+        """Disconnect the client."""
         self.conn.disconnect()
         self.conn.disconnect()
 
 
     def emit(self, record):
     def emit(self, record):
-        '''
-        Send the message to the destination endpoint.
-        '''
+        """Send the message to the destination endpoint."""
         self.conn.send(destination=self.conf['destination'],
         self.conn.send(destination=self.conf['destination'],
                 body=bytes(self.format(record), 'utf-8'))
                 body=bytes(self.format(record), 'utf-8'))
 
 

+ 6 - 8
lakesuperior/messaging/messenger.py

@@ -7,15 +7,15 @@ messenger = logging.getLogger('_messenger')
 
 
 
 
 class Messenger:
 class Messenger:
-    '''
+    """
     Very simple message sender using the standard Python logging facility.
     Very simple message sender using the standard Python logging facility.
-    '''
+    """
     def __init__(self, config):
     def __init__(self, config):
-        '''
+        """
         Set up the messenger.
         Set up the messenger.
 
 
-        @param config (dict) Messenger configuration.
-        '''
+        :param dict config: Messenger configuration.
+        """
         def msg_routes():
         def msg_routes():
             for route in config['routes']:
             for route in config['routes']:
                 handler_cls = getattr(handlers, route['handler'])
                 handler_cls = getattr(handlers, route['handler'])
@@ -31,8 +31,6 @@ class Messenger:
 
 
 
 
     def send(self, *args, **kwargs):
     def send(self, *args, **kwargs):
-        '''
-        Send one or more external messages.
-        '''
+        """Send one or more external messages."""
         for msg, fn in self.msg_routes:
         for msg, fn in self.msg_routes:
             msg.info(fn(*args, **kwargs))
             msg.info(fn(*args, **kwargs))

+ 20 - 20
lakesuperior/model/ldp_factory.py

@@ -26,10 +26,10 @@ logger = logging.getLogger(__name__)
 
 
 
 
 class LdpFactory:
 class LdpFactory:
-    '''
+    """
     Generate LDP instances.
     Generate LDP instances.
     The instance classes are based on provided client data or on stored data.
     The instance classes are based on provided client data or on stored data.
-    '''
+    """
     @staticmethod
     @staticmethod
     def new_container(uid):
     def new_container(uid):
         if not uid.startswith('/') or uid == '/':
         if not uid.startswith('/') or uid == '/':
@@ -43,7 +43,7 @@ class LdpFactory:
 
 
     @staticmethod
     @staticmethod
     def from_stored(uid, repr_opts={}, **kwargs):
     def from_stored(uid, repr_opts={}, **kwargs):
-        '''
+        """
         Create an instance for retrieval purposes.
         Create an instance for retrieval purposes.
 
 
         This factory method creates and returns an instance of an LDPR subclass
         This factory method creates and returns an instance of an LDPR subclass
@@ -52,8 +52,8 @@ class LdpFactory:
 
 
         N.B. The resource must exist.
         N.B. The resource must exist.
 
 
-        @param uid UID of the instance.
-        '''
+        :param str uid: UID of the instance.
+        """
         #logger.info('Retrieving stored resource: {}'.format(uid))
         #logger.info('Retrieving stored resource: {}'.format(uid))
         imr_urn = nsc['fcres'][uid]
         imr_urn = nsc['fcres'][uid]
 
 
@@ -80,16 +80,16 @@ class LdpFactory:
     @staticmethod
     @staticmethod
     def from_provided(
     def from_provided(
             uid, mimetype=None, stream=None, provided_imr=None, **kwargs):
             uid, mimetype=None, stream=None, provided_imr=None, **kwargs):
-        '''
+        """
         Determine LDP type from request content.
         Determine LDP type from request content.
 
 
-        @param uid (string) UID of the resource to be created or updated.
-        @param mimetype (string) The provided content MIME type.
-        @param stream (IOStream | None) The provided data stream. This can be
+        :param str uid: UID of the resource to be created or updated.
+        :param str mimetype: The provided content MIME type.
+        :param IOStream | None stream: The provided data stream. This can be
         RDF or non-RDF content, or None. In the latter case, an empty container
         RDF or non-RDF content, or None. In the latter case, an empty container
         is created.
         is created.
         @param **kwargs Arguments passed to the LDP class constructor.
         @param **kwargs Arguments passed to the LDP class constructor.
-        '''
+        """
         uri = nsc['fcres'][uid]
         uri = nsc['fcres'][uid]
 
 
         if not stream and not mimetype:
         if not stream and not mimetype:
@@ -149,11 +149,11 @@ class LdpFactory:
 
 
     @staticmethod
     @staticmethod
     def is_rdf_parsable(mimetype):
     def is_rdf_parsable(mimetype):
-        '''
+        """
         Checks whether a MIME type support RDF parsing by a RDFLib plugin.
         Checks whether a MIME type support RDF parsing by a RDFLib plugin.
 
 
-        @param mimetype (string) MIME type to check.
-        '''
+        :param str mimetype: MIME type to check.
+        """
         try:
         try:
             plugin.get(mimetype, parser.Parser)
             plugin.get(mimetype, parser.Parser)
         except plugin.PluginException:
         except plugin.PluginException:
@@ -164,11 +164,11 @@ class LdpFactory:
 
 
     @staticmethod
     @staticmethod
     def is_rdf_serializable(mimetype):
     def is_rdf_serializable(mimetype):
-        '''
+        """
         Checks whether a MIME type support RDF serialization by a RDFLib plugin
         Checks whether a MIME type support RDF serialization by a RDFLib plugin
 
 
-        @param mimetype (string) MIME type to check.
-        '''
+        :param str mimetype: MIME type to check.
+        """
         try:
         try:
             plugin.get(mimetype, serializer.Serializer)
             plugin.get(mimetype, serializer.Serializer)
         except plugin.PluginException:
         except plugin.PluginException:
@@ -179,7 +179,7 @@ class LdpFactory:
 
 
     @staticmethod
     @staticmethod
     def mint_uid(parent_uid, path=None):
     def mint_uid(parent_uid, path=None):
-        '''
+        """
         Mint a new resource UID based on client directives.
         Mint a new resource UID based on client directives.
 
 
         This method takes a parent ID and a tentative path and returns an LDP
         This method takes a parent ID and a tentative path and returns an LDP
@@ -188,13 +188,13 @@ class LdpFactory:
         This may raise an exception resulting in a 404 if the parent is not
         This may raise an exception resulting in a 404 if the parent is not
         found or a 409 if the parent is not a valid container.
         found or a 409 if the parent is not a valid container.
 
 
-        @param parent_uid (string) UID of the parent resource. It must be an
+        :param str parent_uid: UID of the parent resource. It must be an
         existing LDPC.
         existing LDPC.
-        @param path (string) path to the resource, relative to the parent.
+        :param str path: path to the resource, relative to the parent.
 
 
         @return string The confirmed resource UID. This may be different from
         @return string The confirmed resource UID. This may be different from
         what has been indicated.
         what has been indicated.
-        '''
+        """
         def split_if_legacy(uid):
         def split_if_legacy(uid):
             if config['application']['store']['ldp_rs']['legacy_ptree_split']:
             if config['application']['store']['ldp_rs']['legacy_ptree_split']:
                 uid = tbox.split_uuid(uid)
                 uid = tbox.split_uuid(uid)

+ 14 - 13
lakesuperior/model/ldp_nr.py

@@ -17,10 +17,10 @@ logger = logging.getLogger(__name__)
 
 
 
 
 class LdpNr(Ldpr):
 class LdpNr(Ldpr):
-    '''LDP-NR (Non-RDF Source).
+    """LDP-NR (Non-RDF Source).
 
 
     Definition: https://www.w3.org/TR/ldp/#ldpnr
     Definition: https://www.w3.org/TR/ldp/#ldpnr
-    '''
+    """
 
 
     base_types = {
     base_types = {
         nsc['fcrepo'].Binary,
         nsc['fcrepo'].Binary,
@@ -31,9 +31,9 @@ class LdpNr(Ldpr):
 
 
     def __init__(self, uuid, stream=None, mimetype=None,
     def __init__(self, uuid, stream=None, mimetype=None,
             disposition=None, **kwargs):
             disposition=None, **kwargs):
-        '''
+        """
         Extends Ldpr.__init__ by adding LDP-NR specific parameters.
         Extends Ldpr.__init__ by adding LDP-NR specific parameters.
-        '''
+        """
         super().__init__(uuid, **kwargs)
         super().__init__(uuid, **kwargs)
 
 
         self._imr_options = {}
         self._imr_options = {}
@@ -68,11 +68,12 @@ class LdpNr(Ldpr):
 
 
 
 
     def create_or_replace(self, create_only=False):
     def create_or_replace(self, create_only=False):
-        '''
+        """
         Create a new binary resource with a corresponding RDF representation.
         Create a new binary resource with a corresponding RDF representation.
 
 
-        @param file (Stream) A Stream resource representing the uploaded file.
-        '''
+        :param bool create_only: Whether the resource is being created or
+        updated.
+        """
         # Persist the stream.
         # Persist the stream.
         self.digest, self.size = nonrdfly.persist(self.stream)
         self.digest, self.size = nonrdfly.persist(self.stream)
 
 
@@ -91,14 +92,14 @@ class LdpNr(Ldpr):
     ## PROTECTED METHODS ##
     ## PROTECTED METHODS ##
 
 
     def _add_srv_mgd_triples(self, create=False):
     def _add_srv_mgd_triples(self, create=False):
-        '''
+        """
         Add all metadata for the RDF representation of the LDP-NR.
         Add all metadata for the RDF representation of the LDP-NR.
 
 
-        @param stream (BufferedIO) The uploaded data stream.
-        @param mimetype (string) MIME type of the uploaded file.
-        @param disposition (defaultdict) The `Content-Disposition` header
-        content, parsed through `parse_rfc7240`.
-        '''
+        :param BufferedIO stream: The uploaded data stream.
+        :param str mimetype: MIME type of the uploaded file.
+        :param defaultdict disposition: The ``Content-Disposition`` header
+        content, parsed through ``parse_rfc7240``.
+        """
         super()._add_srv_mgd_triples(create)
         super()._add_srv_mgd_triples(create)
 
 
         # File size.
         # File size.

+ 15 - 17
lakesuperior/model/ldp_rs.py

@@ -12,20 +12,21 @@ logger = logging.getLogger(__name__)
 
 
 
 
 class LdpRs(Ldpr):
 class LdpRs(Ldpr):
-    '''
+    """
     LDP-RS (LDP RDF source).
     LDP-RS (LDP RDF source).
 
 
     https://www.w3.org/TR/ldp/#ldprs
     https://www.w3.org/TR/ldp/#ldprs
-    '''
+    """
     def __init__(self, uuid, repr_opts={}, handling='lenient', **kwargs):
     def __init__(self, uuid, repr_opts={}, handling='lenient', **kwargs):
-        '''
-        Extends Ldpr.__init__ by adding LDP-RS specific parameters.
-
-        @param handling (string) One of `strict`, `lenient` (the default) or
-        `none`. `strict` raises an error if a server-managed term is in the
-        graph. `lenient` removes all sever-managed triples encountered. `none`
-        skips all server-managed checks. It is used for internal modifications.
-        '''
+        """
+        Extends :meth:`Ldpr.__init__` by adding LDP-RS specific parameters.
+
+        :param str handling: One of ``strict``, ``lenient`` (the default) or
+        ``none``. ``strict`` raises an error if a server-managed term is in the
+        graph. ``lenient`` removes all server-managed triples encountered.
+        ``none`` skips all server-managed checks. It is used for internal
+        modifications.
+        """
         super().__init__(uuid, **kwargs)
         super().__init__(uuid, **kwargs)
         self.base_types = super().base_types | {
         self.base_types = super().base_types | {
             nsc['fcrepo'].Container,
             nsc['fcrepo'].Container,
@@ -44,8 +45,7 @@ class LdpRs(Ldpr):
 
 
 
 
 class Ldpc(LdpRs):
 class Ldpc(LdpRs):
-    '''LDPC (LDP Container).'''
-
+    """LDPC (LDP Container)."""
     def __init__(self, uuid, *args, **kwargs):
     def __init__(self, uuid, *args, **kwargs):
         super().__init__(uuid, *args, **kwargs)
         super().__init__(uuid, *args, **kwargs)
         self.base_types |= {
         self.base_types |= {
@@ -56,7 +56,7 @@ class Ldpc(LdpRs):
 
 
 
 
 class LdpBc(Ldpc):
 class LdpBc(Ldpc):
-    '''LDP-BC (LDP Basic Container).'''
+    """LDP-BC (LDP Basic Container)."""
     def __init__(self, uuid, *args, **kwargs):
     def __init__(self, uuid, *args, **kwargs):
         super().__init__(uuid, *args, **kwargs)
         super().__init__(uuid, *args, **kwargs)
         self.base_types |= {
         self.base_types |= {
@@ -66,8 +66,7 @@ class LdpBc(Ldpc):
 
 
 
 
 class LdpDc(Ldpc):
 class LdpDc(Ldpc):
-    '''LDP-DC (LDP Direct Container).'''
-
+    """LDP-DC (LDP Direct Container)."""
     def __init__(self, uuid, *args, **kwargs):
     def __init__(self, uuid, *args, **kwargs):
         super().__init__(uuid, *args, **kwargs)
         super().__init__(uuid, *args, **kwargs)
         self.base_types |= {
         self.base_types |= {
@@ -77,8 +76,7 @@ class LdpDc(Ldpc):
 
 
 
 
 class LdpIc(Ldpc):
 class LdpIc(Ldpc):
-    '''LDP-IC (LDP Indirect Container).'''
-
+    """LDP-IC (LDP Indirect Container)."""
     def __init__(self, uuid, *args, **kwargs):
     def __init__(self, uuid, *args, **kwargs):
         super().__init__(uuid, *args, **kwargs)
         super().__init__(uuid, *args, **kwargs)
         self.base_types |= {
         self.base_types |= {

+ 15 - 14
lakesuperior/model/ldpr.py

@@ -106,8 +106,8 @@ class Ldpr(metaclass=ABCMeta):
     ## MAGIC METHODS ##
     ## MAGIC METHODS ##
 
 
     def __init__(self, uid, repr_opts={}, provided_imr=None, **kwargs):
     def __init__(self, uid, repr_opts={}, provided_imr=None, **kwargs):
-        """Instantiate an in-memory LDP resource that can be loaded from and
-        persisted to storage.
+        """
+        Instantiate an in-memory LDP resource.
 
 
         :param str uid: uid of the resource. If None (must be explicitly
         :param str uid: uid of the resource. If None (must be explicitly
         set) it refers to the root node. It can also be the full URI or URN,
         set) it refers to the root node. It can also be the full URI or URN,
@@ -136,7 +136,7 @@ class Ldpr(metaclass=ABCMeta):
         The RDFLib resource representing this LDPR. This is a live
         The RDFLib resource representing this LDPR. This is a live
         representation of the stored data if present.
         representation of the stored data if present.
 
 
-        @return rdflib.resource.Resource
+        :rtype: rdflib.resource.Resource
         """
         """
         if not hasattr(self, '_rsrc'):
         if not hasattr(self, '_rsrc'):
             self._rsrc = rdfly.ds.resource(self.uri)
             self._rsrc = rdfly.ds.resource(self.uri)
@@ -285,7 +285,7 @@ class Ldpr(metaclass=ABCMeta):
     def types(self):
     def types(self):
         """All RDF types.
         """All RDF types.
 
 
-        @return set(rdflib.term.URIRef)
+        :rtype: set(rdflib.term.URIRef)
         """
         """
         if not hasattr(self, '_types'):
         if not hasattr(self, '_types'):
             if len(self.metadata.graph):
             if len(self.metadata.graph):
@@ -305,7 +305,7 @@ class Ldpr(metaclass=ABCMeta):
     def ldp_types(self):
     def ldp_types(self):
         """The LDP types.
         """The LDP types.
 
 
-        @return set(rdflib.term.URIRef)
+        :rtype: set(rdflib.term.URIRef)
         """
         """
         if not hasattr(self, '_ldp_types'):
         if not hasattr(self, '_ldp_types'):
             self._ldp_types = {t for t in self.types if nsc['ldp'] in t}
             self._ldp_types = {t for t in self.types if nsc['ldp'] in t}
@@ -486,14 +486,14 @@ class Ldpr(metaclass=ABCMeta):
         """
         """
         tstone_trp = set(rdfly.extract_imr(self.uid, strict=False).graph)
         tstone_trp = set(rdfly.extract_imr(self.uid, strict=False).graph)
 
 
-        ver_rsp = self.version_info.graph.query("""
+        ver_rsp = self.version_info.graph.query('''
         SELECT ?uid {
         SELECT ?uid {
           ?latest fcrepo:hasVersionLabel ?uid ;
           ?latest fcrepo:hasVersionLabel ?uid ;
             fcrepo:created ?ts .
             fcrepo:created ?ts .
         }
         }
         ORDER BY DESC(?ts)
         ORDER BY DESC(?ts)
         LIMIT 1
         LIMIT 1
-        """)
+        ''')
         ver_uid = str(ver_rsp.bindings[0]['uid'])
         ver_uid = str(ver_rsp.bindings[0]['uid'])
         ver_trp = set(rdfly.get_metadata(self.uid, ver_uid).graph)
         ver_trp = set(rdfly.get_metadata(self.uid, ver_uid).graph)
 
 
@@ -686,7 +686,7 @@ class Ldpr(metaclass=ABCMeta):
         """
         """
         Add server-managed triples to a provided IMR.
         Add server-managed triples to a provided IMR.
 
 
-        :param  create: Whether the resource is being created.
+        :param create: Whether the resource is being created.
         """
         """
         # Base LDP types.
         # Base LDP types.
         for t in self.base_types:
         for t in self.base_types:
@@ -725,11 +725,11 @@ class Ldpr(metaclass=ABCMeta):
         is found.
         is found.
 
 
         E.g. if only fcres:/a exists:
         E.g. if only fcres:/a exists:
-        - If fcres:/a/b/c/d is being created, a becomes container of
-          fcres:/a/b/c/d. Also, containers are created for fcres:a/b and
-          fcres:/a/b/c.
-        - If fcres:/e is being created, the root node becomes container of
-          fcres:/e.
+        - If ``fcres:/a/b/c/d`` is being created, a becomes container of
+          ``fcres:/a/b/c/d``. Also, containers are created for ``fcres:/a/b`` and
+          ``fcres:/a/b/c``.
+        - If ``fcres:/e`` is being created, the root node becomes container of
+          ``fcres:/e``.
 
 
         :param bool create: Whether the resource is being created. If false,
         :param bool create: Whether the resource is being created. If false,
         the parent container is not updated.
         the parent container is not updated.
@@ -775,7 +775,8 @@ class Ldpr(metaclass=ABCMeta):
         Remove duplicate triples from add and remove delta graphs, which would
         Remove duplicate triples from add and remove delta graphs, which would
         otherwise contain unnecessary statements that annul each other.
         otherwise contain unnecessary statements that annul each other.
 
 
-        @return tuple 2 "clean" sets of respectively remove statements and
+        :rtype: tuple
+        :return: 2 "clean" sets of respectively remove statements and
         add statements.
         add statements.
         """
         """
         return (
         return (

+ 7 - 13
lakesuperior/store/ldp_nr/base_non_rdf_layout.py

@@ -7,18 +7,18 @@ logger = logging.getLogger(__name__)
 
 
 
 
 class BaseNonRdfLayout(metaclass=ABCMeta):
 class BaseNonRdfLayout(metaclass=ABCMeta):
-    '''
+    """
     Abstract class for setting the non-RDF (bitstream) store layout.
     Abstract class for setting the non-RDF (bitstream) store layout.
 
 
     Different layouts can be created by implementing all the abstract methods
     Different layouts can be created by implementing all the abstract methods
     of this class. A non-RDF layout is not necessarily restricted to a
     of this class. A non-RDF layout is not necessarily restricted to a
     traditional filesystem—e.g. a layout persisting to HDFS can be written too.
     traditional filesystem—e.g. a layout persisting to HDFS can be written too.
-    '''
+    """
 
 
     def __init__(self, config):
     def __init__(self, config):
-        '''
+        """
         Initialize the base non-RDF store layout.
         Initialize the base non-RDF store layout.
-        '''
+        """
         self.config = config
         self.config = config
         self.root = config['path']
         self.root = config['path']
 
 
@@ -27,23 +27,17 @@ class BaseNonRdfLayout(metaclass=ABCMeta):
 
 
     @abstractmethod
     @abstractmethod
     def persist(self, stream):
     def persist(self, stream):
-        '''
-        Store the stream in the designated persistence layer for this layout.
-        '''
+        """Store the stream in the designated persistence layer."""
         pass
         pass
 
 
 
 
     @abstractmethod
     @abstractmethod
     def delete(self, id):
     def delete(self, id):
-        '''
-        Delete a stream by its identifier (i.e. checksum).
-        '''
+        """Delete a stream by its identifier (i.e. checksum)."""
         pass
         pass
 
 
 
 
     @abstractmethod
     @abstractmethod
     def local_path(self, uuid):
     def local_path(self, uuid):
-        '''
-        Return the local path of a file.
-        '''
+        """Return the local path of a file."""
         pass
         pass

+ 15 - 18
lakesuperior/store/ldp_nr/default_layout.py

@@ -12,18 +12,21 @@ logger = logging.getLogger(__name__)
 
 
 
 
 class DefaultLayout(BaseNonRdfLayout):
 class DefaultLayout(BaseNonRdfLayout):
-    '''
+    """
     Default file layout.
     Default file layout.
-    '''
+
+    This is a simple filesystem layout that stores binaries in pairtree folders
+    in a local filesystem. Parameters can be specified for the pairtree
+    branch length and branch count.
+    """
     @staticmethod
     @staticmethod
     def local_path(root, uuid, bl=4, bc=4):
     def local_path(root, uuid, bl=4, bc=4):
-        '''
+        """
         Generate the resource path splitting the resource checksum according to
         Generate the resource path splitting the resource checksum according to
         configuration parameters.
         configuration parameters.
 
 
-        @param uuid (string) The resource UUID. This corresponds to the content
+        :param str uuid: The resource UUID. This corresponds to the content
         checksum.
         checksum.
-        '''
+        """
         logger.debug('Generating path from uuid: {}'.format(uuid))
         logger.debug('Generating path from uuid: {}'.format(uuid))
         term = len(uuid) if bc == 0 else min(bc * bl, len(uuid))
         term = len(uuid) if bc == 0 else min(bc * bl, len(uuid))
 
 
@@ -37,9 +40,7 @@ class DefaultLayout(BaseNonRdfLayout):
 
 
 
 
     def __init__(self, *args, **kwargs):
     def __init__(self, *args, **kwargs):
-        '''
-        Set up path segmentation parameters.
-        '''
+        """Set up path segmentation parameters."""
         super().__init__(*args, **kwargs)
         super().__init__(*args, **kwargs)
 
 
         self.bl = self.config['pairtree_branch_length']
         self.bl = self.config['pairtree_branch_length']
@@ -49,9 +50,7 @@ class DefaultLayout(BaseNonRdfLayout):
     ## INTERFACE METHODS ##
     ## INTERFACE METHODS ##
 
 
     def bootstrap(self):
     def bootstrap(self):
-        '''
-        Initialize binary file store.
-        '''
+        """Initialize binary file store."""
         try:
         try:
             shutil.rmtree(self.root)
             shutil.rmtree(self.root)
         except FileNotFoundError:
         except FileNotFoundError:
@@ -60,7 +59,7 @@ class DefaultLayout(BaseNonRdfLayout):
 
 
 
 
     def persist(self, stream, bufsize=8192):
     def persist(self, stream, bufsize=8192):
-        '''
+        """
         Store the stream in the file system.
         Store the stream in the file system.
 
 
         This method handles the file in chunks. for each chunk it writes to a
         This method handles the file in chunks. for each chunk it writes to a
@@ -68,9 +67,9 @@ class DefaultLayout(BaseNonRdfLayout):
         to disk and hashed, the temp file is moved to its final location which
         to disk and hashed, the temp file is moved to its final location which
         is determined by the hash value.
         is determined by the hash value.
 
 
-        @param stream (IOstream): file-like object to persist.
-        @param bufsize (int) Chunk size. 2**12 to 2**15 is a good range.
-        '''
+        :param IOstream stream: file-like object to persist.
+        :param int bufsize: Chunk size. 2**12 to 2**15 is a good range.
+        """
         tmp_file = '{}/tmp/{}'.format(self.root, uuid4())
         tmp_file = '{}/tmp/{}'.format(self.root, uuid4())
         try:
         try:
             with open(tmp_file, 'wb') as f:
             with open(tmp_file, 'wb') as f:
@@ -111,7 +110,5 @@ class DefaultLayout(BaseNonRdfLayout):
 
 
 
 
     def delete(self, uuid):
     def delete(self, uuid):
-        '''
-        See BaseNonRdfLayout.delete.
-        '''
+        """See BaseNonRdfLayout.delete."""
         os.unlink(__class__.local_path(self.root, uuid, self.bl, self.bc))
         os.unlink(__class__.local_path(self.root, uuid, self.bl, self.bc))

+ 162 - 167
lakesuperior/store/ldp_rs/lmdb_store.py

@@ -19,21 +19,21 @@ logger = logging.getLogger(__name__)
 
 
 
 
 def s2b(u, enc='UTF-8'):
 def s2b(u, enc='UTF-8'):
-    '''
+    """
     Convert a string into a bytes object.
     Convert a string into a bytes object.
-    '''
+    """
     return u.encode(enc)
     return u.encode(enc)
 
 
 
 
 def b2s(u, enc='UTF-8'):
 def b2s(u, enc='UTF-8'):
-    '''
+    """
     Convert a bytes or memoryview object into a string.
     Convert a bytes or memoryview object into a string.
-    '''
+    """
     return bytes(u).decode(enc)
     return bytes(u).decode(enc)
 
 
 
 
 class TxnManager(ContextDecorator):
 class TxnManager(ContextDecorator):
-    '''
+    """
     Handle ACID transactions with an LmdbStore.
     Handle ACID transactions with an LmdbStore.
 
 
     Wrap this within a `with` statement:
     Wrap this within a `with` statement:
@@ -43,15 +43,15 @@ class TxnManager(ContextDecorator):
     >>>
     >>>
 
 
     The transaction will be opened and handled automatically.
     The transaction will be opened and handled automatically.
-    '''
+    """
     def __init__(self, store, write=False):
     def __init__(self, store, write=False):
-        '''
+        """
         Begin and close a transaction in a store.
         Begin and close a transaction in a store.
 
 
-        @param store (LmdbStore) The store to open a transaction on.
-        @param write (bool) Whether the transaction is read-write. Default is
+        :param LmdbStore store: The store to open a transaction on.
+        :param bool write: Whether the transaction is read-write. Default is
         False (read-only transaction).
         False (read-only transaction).
-        '''
+        """
         self.store = store
         self.store = store
         self.write = write
         self.write = write
 
 
@@ -69,34 +69,34 @@ class TxnManager(ContextDecorator):
 
 
 
 
 class LexicalSequence:
 class LexicalSequence:
-    '''
+    """
     Fixed-length lexicographically ordered byte sequence.
     Fixed-length lexicographically ordered byte sequence.
 
 
     Useful to generate optimized sequences of keys in LMDB.
     Useful to generate optimized sequences of keys in LMDB.
-    '''
+    """
     def __init__(self, start=1, max_len=5):
     def __init__(self, start=1, max_len=5):
-        '''
-        @param start (bytes) Starting byte value. Bytes below this value are
+        """
+        Create a new lexical sequence.
+
+        :param bytes start: Starting byte value. Bytes below this value are
         never found in this sequence. This is useful to allot special bytes
         never found in this sequence. This is useful to allot special bytes
         to be used e.g. as separators.
         to be used e.g. as separators.
-        @param max_len (int) Maximum number of bytes that a byte string can
+        :param int max_len: Maximum number of bytes that a byte string can
         contain. This should be chosen carefully since the number of all
         contain. This should be chosen carefully since the number of all
         possible key combinations is determined by this value and the `start`
         possible key combinations is determined by this value and the `start`
         value. The default args provide 255**5 (~1 Tn) unique combinations.
         value. The default args provide 255**5 (~1 Tn) unique combinations.
-        '''
+        """
         self.start = start
         self.start = start
         self.length = max_len
         self.length = max_len
 
 
 
 
     def first(self):
     def first(self):
-        '''
-        First possible combination.
-        '''
+        """First possible combination."""
         return bytearray([self.start] * self.length)
         return bytearray([self.start] * self.length)
 
 
 
 
     def next(self, n):
     def next(self, n):
-        '''
+        """
         Calculate the next closest byte sequence in lexicographical order.
         Calculate the next closest byte sequence in lexicographical order.
 
 
         This is used to fill the next available slot after the last one in
         This is used to fill the next available slot after the last one in
@@ -107,8 +107,8 @@ class LexicalSequence:
         This function assumes that all the keys are padded with the `start`
         This function assumes that all the keys are padded with the `start`
         value up to the `max_len` length.
         value up to the `max_len` length.
 
 
-        @param n (bytes) Current byte sequence to add to.
-        '''
+        :param bytes n: Current byte sequence to add to.
+        """
         if not n:
         if not n:
             n = self.first()
             n = self.first()
         elif isinstance(n, bytes) or isinstance(n, memoryview):
         elif isinstance(n, bytes) or isinstance(n, memoryview):
@@ -137,7 +137,7 @@ class LexicalSequence:
 
 
 
 
 class LmdbStore(Store):
 class LmdbStore(Store):
-    '''
+    """
     LMDB-backed store.
     LMDB-backed store.
 
 
     This is an implementation of the RDFLib Store interface:
     This is an implementation of the RDFLib Store interface:
@@ -172,7 +172,7 @@ class LmdbStore(Store):
     (also in a SPARQL query) will look in the  union graph instead of in the
     (also in a SPARQL query) will look in the  union graph instead of in the
     default graph. Also, removing triples without specifying a context will
     default graph. Also, removing triples without specifying a context will
     remove triples from all contexts.
     remove triples from all contexts.
-    '''
+    """
 
 
     context_aware = True
     context_aware = True
     # This is a hassle to maintain for no apparent gain. If some use is devised
     # This is a hassle to maintain for no apparent gain. If some use is devised
@@ -181,19 +181,19 @@ class LmdbStore(Store):
     graph_aware = True
     graph_aware = True
     transaction_aware = True
     transaction_aware = True
 
 
-    '''
+    """
     LMDB map size. See http://lmdb.readthedocs.io/en/release/#environment-class
     LMDB map size. See http://lmdb.readthedocs.io/en/release/#environment-class
-    '''
+    """
     MAP_SIZE = 1024 ** 4 # 1Tb
     MAP_SIZE = 1024 ** 4 # 1Tb
 
 
-    '''
+    """
     Key hashing algorithm. If you are paranoid, use SHA1. Otherwise, MD5 is
     Key hashing algorithm. If you are paranoid, use SHA1. Otherwise, MD5 is
     faster and takes up less space (16 bytes vs. 20 bytes). This may make a
     faster and takes up less space (16 bytes vs. 20 bytes). This may make a
     visible difference because keys are generated and parsed very often.
     visible difference because keys are generated and parsed very often.
-    '''
+    """
     KEY_HASH_ALGO = 'sha1'
     KEY_HASH_ALGO = 'sha1'
 
 
-    '''
+    """
     Fixed length for term keys.
     Fixed length for term keys.
 
 
     4 or 5 is a safe range. 4 allows for ~4 billion (256 ** 4) unique terms
     4 or 5 is a safe range. 4 allows for ~4 billion (256 ** 4) unique terms
@@ -209,13 +209,13 @@ class LmdbStore(Store):
     could improve performance since keys make up the vast majority of record
     could improve performance since keys make up the vast majority of record
     exchange between the store and the application. However it is sensible not
     exchange between the store and the application. However it is sensible not
     to expose this value as a configuration option.
     to expose this value as a configuration option.
-    '''
+    """
     KEY_LENGTH = 5
     KEY_LENGTH = 5
 
 
-    '''
+    """
     Lexical sequence start. `\x01` is fine since no special characters are used,
     Lexical sequence start. `\x01` is fine since no special characters are used,
     but it's good to leave a spare for potential future use.
     but it's good to leave a spare for potential future use.
-    '''
+    """
     KEY_START = 1
     KEY_START = 1
 
 
     data_keys = (
     data_keys = (
@@ -237,19 +237,19 @@ class LmdbStore(Store):
         's:po', 'p:so', 'o:sp', 'c:spo',
         's:po', 'p:so', 'o:sp', 'c:spo',
     )
     )
 
 
-    '''
+    """
     Order in which keys are looked up if two terms are bound.
     Order in which keys are looked up if two terms are bound.
     The indices with the smallest average number of values per key should be
     The indices with the smallest average number of values per key should be
     looked up first.
     looked up first.
 
 
     If we want to get fancy, this can be rebalanced from time to time by
     If we want to get fancy, this can be rebalanced from time to time by
     looking up the number of keys in (s:po, p:so, o:sp).
     looking up the number of keys in (s:po, p:so, o:sp).
-    '''
+    """
     _lookup_rank = ('s', 'o', 'p')
     _lookup_rank = ('s', 'o', 'p')
 
 
-    '''
+    """
     Order of terms in the lookup indices. Used to rebuild a triple from lookup.
     Order of terms in the lookup indices. Used to rebuild a triple from lookup.
-    '''
+    """
     _lookup_ordering = {
     _lookup_ordering = {
         's:po': (0, 1, 2),
         's:po': (0, 1, 2),
         'p:so': (1, 0, 2),
         'p:so': (1, 0, 2),
@@ -279,19 +279,17 @@ class LmdbStore(Store):
 
 
 
 
     def __del__(self):
     def __del__(self):
-        '''
-        Properly close store for garbage collection.
-        '''
+        """Properly close store for garbage collection."""
         self.close(True)
         self.close(True)
 
 
 
 
     def __len__(self, context=None):
     def __len__(self, context=None):
-        '''
+        """
         Return length of the dataset.
         Return length of the dataset.
 
 
-        @param context (rdflib.URIRef | rdflib.Graph) Context to restrict count
-        to.
-        '''
+        :param context: Context to restrict count to.
+        :type context: rdflib.URIRef or rdflib.Graph
+        """
         context = self._normalize_context(context)
         context = self._normalize_context(context)
 
 
         if context is not None:
         if context is not None:
@@ -311,7 +309,7 @@ class LmdbStore(Store):
 
 
 
 
     def open(self, configuration=None, create=True):
     def open(self, configuration=None, create=True):
-        '''
+        """
         Open the database.
         Open the database.
 
 
         The database is best left open for the lifespan of the server. Read
         The database is best left open for the lifespan of the server. Read
@@ -321,7 +319,7 @@ class LmdbStore(Store):
 
 
         This method is called outside of the main transaction. All cursors
         This method is called outside of the main transaction. All cursors
         are created separately within the transaction.
         are created separately within the transaction.
-        '''
+        """
         self._init_db_environments(create)
         self._init_db_environments(create)
         if self.data_env == NO_STORE:
         if self.data_env == NO_STORE:
             return NO_STORE
             return NO_STORE
@@ -331,9 +329,9 @@ class LmdbStore(Store):
 
 
 
 
     def begin(self, write=False):
     def begin(self, write=False):
-        '''
+        """
         Begin the main write transaction and create cursors.
         Begin the main write transaction and create cursors.
-        '''
+        """
         if not self.is_open:
         if not self.is_open:
             raise RuntimeError('Store must be opened first.')
             raise RuntimeError('Store must be opened first.')
         logger.debug('Beginning a {} transaction.'.format(
         logger.debug('Beginning a {} transaction.'.format(
@@ -346,9 +344,7 @@ class LmdbStore(Store):
 
 
 
 
     def stats(self):
     def stats(self):
-        '''
-        Gather statistics about the database.
-        '''
+        """Gather statistics about the database."""
         stats = {
         stats = {
             'data_db_stats': {
             'data_db_stats': {
                 db_label: self.data_txn.stat(self.dbs[db_label])
                 db_label: self.data_txn.stat(self.dbs[db_label])
@@ -368,9 +364,7 @@ class LmdbStore(Store):
 
 
     @property
     @property
     def is_txn_open(self):
     def is_txn_open(self):
-        '''
-        Whether the main transaction is open.
-        '''
+        """Whether the main transaction is open."""
         try:
         try:
             self.data_txn.id()
             self.data_txn.id()
             self.idx_txn.id()
             self.idx_txn.id()
@@ -383,9 +377,7 @@ class LmdbStore(Store):
 
 
 
 
     def cur(self, index):
     def cur(self, index):
-        '''
-        Return a new cursor by its index.
-        '''
+        """Return a new cursor by its index."""
         if index in self.idx_keys:
         if index in self.idx_keys:
             txn = self.idx_txn
             txn = self.idx_txn
             src = self.idx_keys
             src = self.idx_keys
@@ -399,14 +391,14 @@ class LmdbStore(Store):
 
 
 
 
     def get_data_cursors(self, txn):
     def get_data_cursors(self, txn):
-        '''
+        """
         Build the main data cursors for a transaction.
         Build the main data cursors for a transaction.
 
 
-        @param txn (lmdb.Transaction) This can be a read or write transaction.
+        :param lmdb.Transaction txn: This can be a read or write transaction.
 
 
-        @return dict(string, lmdb.Cursor) Keys are index labels, values are
-        index cursors.
-        '''
+        :rtype: dict(string, lmdb.Cursor)
+        :return: Keys are index labels, values are index cursors.
+        """
         return {
         return {
             'tk:t': txn.cursor(self.dbs['tk:t']),
             'tk:t': txn.cursor(self.dbs['tk:t']),
             'tk:c': txn.cursor(self.dbs['tk:c']),
             'tk:c': txn.cursor(self.dbs['tk:c']),
@@ -415,25 +407,25 @@ class LmdbStore(Store):
 
 
 
 
     def get_idx_cursors(self, txn):
     def get_idx_cursors(self, txn):
-        '''
+        """
         Build the index cursors for a transaction.
         Build the index cursors for a transaction.
 
 
-        @param txn (lmdb.Transaction) This can be a read or write transaction.
+        :param lmdb.Transaction txn: This can be a read or write transaction.
 
 
         @return dict(string, lmdb.Cursor) Keys are index labels, values are
         @return dict(string, lmdb.Cursor) Keys are index labels, values are
         index cursors.
         index cursors.
-        '''
+        """
         return {
         return {
             key: txn.cursor(self.dbs[key])
             key: txn.cursor(self.dbs[key])
             for key in self.idx_keys}
             for key in self.idx_keys}
 
 
 
 
     def close(self, commit_pending_transaction=False):
     def close(self, commit_pending_transaction=False):
-        '''
+        """
         Close the database connection.
         Close the database connection.
 
 
         Do this at server shutdown.
         Do this at server shutdown.
-        '''
+        """
         self.__open = False
         self.__open = False
         if self.is_txn_open:
         if self.is_txn_open:
             if commit_pending_transaction:
             if commit_pending_transaction:
@@ -446,26 +438,27 @@ class LmdbStore(Store):
 
 
 
 
     def destroy(self, path):
     def destroy(self, path):
-        '''
+        """
         Destroy the store.
         Destroy the store.
 
 
         https://www.youtube.com/watch?v=lIVq7FJnPwg
         https://www.youtube.com/watch?v=lIVq7FJnPwg
 
 
-        @param path (string) Path of the folder containing the database(s).
-        '''
+        :param str path: Path of the folder containing the database(s).
+        """
         if exists(path):
         if exists(path):
             rmtree(path)
             rmtree(path)
 
 
 
 
     def add(self, triple, context=None, quoted=False):
     def add(self, triple, context=None, quoted=False):
-        '''
+        """
         Add a triple and start indexing.
         Add a triple and start indexing.
 
 
-        @param triple (tuple:rdflib.Identifier) Tuple of three identifiers.
-        @param context (rdflib.Identifier | None) Context identifier.
-        'None' inserts in the default graph.
-        @param quoted (bool) Not used.
-        '''
+        :param tuple triple: Tuple of three rdflib.Identifier terms.
+        :param context: Context identifier. ``None`` inserts in the default
+        graph.
+        :type context: rdflib.Identifier or None
+        :param bool quoted: Not used.
+        """
         context = self._normalize_context(context)
         context = self._normalize_context(context)
         if context is None:
         if context is None:
             context = RDFLIB_DEFAULT_GRAPH_URI
             context = RDFLIB_DEFAULT_GRAPH_URI
@@ -512,16 +505,16 @@ class LmdbStore(Store):
 
 
 
 
     def remove(self, triple_pattern, context=None):
     def remove(self, triple_pattern, context=None):
-        '''
+        """
         Remove triples by a pattern.
         Remove triples by a pattern.
 
 
-        @param triple_pattern (tuple:rdflib.term.Identifier|None) 3-tuple of
+        :param tuple triple_pattern: 3-tuple of
         either RDF terms or None, indicating the triple(s) to be removed.
         either RDF terms or None, indicating the triple(s) to be removed.
         None is used as a wildcard.
         None is used as a wildcard.
-        @param context (rdflib.term.Identifier|None) Context to remove the
-        triples from. If None (the default) the matching triples are removed
-        from all contexts.
-        '''
+        :param context: Context to remove the triples from. If None (the
+        default) the matching triples are removed from all contexts.
+        :type context: rdflib.term.Identifier or None
+        """
         #logger.debug('Removing triples by pattern: {} on context: {}'.format(
         #logger.debug('Removing triples by pattern: {} on context: {}'.format(
         #    triple_pattern, context))
         #    triple_pattern, context))
         context = self._normalize_context(context)
         context = self._normalize_context(context)
@@ -562,18 +555,18 @@ class LmdbStore(Store):
 
 
 
 
     def triples(self, triple_pattern, context=None):
     def triples(self, triple_pattern, context=None):
-        '''
+        """
         Generator over matching triples.
         Generator over matching triples.
 
 
-        @param triple_pattern (tuple) 3 RDFLib terms
-        @param context (rdflib.Graph | None) Context graph, if available.
+        :param tuple triple_pattern: 3 RDFLib terms
+        :param context: Context graph, if available.
+        :type context: rdflib.Graph or None
 
 
-        @return Generator over triples and contexts in which each result has
+        :return: Generator over triples and contexts in which each result has
         the following format:
         the following format:
         > (s, p, o), generator(contexts)
         > (s, p, o), generator(contexts)
         Where the contexts generator lists all context that the triple appears
         Where the contexts generator lists all context that the triple appears
         in.
         in.
-        '''
+        """
         #logger.debug('Getting triples for pattern: {} and context: {}'.format(
         #logger.debug('Getting triples for pattern: {} and context: {}'.format(
         #    triple_pattern, context))
         #    triple_pattern, context))
         # This sounds strange, RDFLib should be passing None at this point,
         # This sounds strange, RDFLib should be passing None at this point,
@@ -620,12 +613,12 @@ class LmdbStore(Store):
 
 
 
 
     def bind(self, prefix, namespace):
     def bind(self, prefix, namespace):
-        '''
+        """
         Bind a prefix to a namespace.
         Bind a prefix to a namespace.
 
 
-        @param prefix (string) Namespace prefix.
-        @param namespace (rdflib.URIRef) Fully qualified URI of namespace.
-        '''
+        :param str prefix: Namespace prefix.
+        :param rdflib.URIRef namespace: Fully qualified URI of namespace.
+        """
         prefix = s2b(prefix)
         prefix = s2b(prefix)
         namespace = s2b(namespace)
         namespace = s2b(namespace)
         if self.is_txn_rw:
         if self.is_txn_rw:
@@ -643,44 +636,42 @@ class LmdbStore(Store):
 
 
 
 
     def namespace(self, prefix):
     def namespace(self, prefix):
-        '''
+        """
         Get the namespace for a prefix.
         Get the namespace for a prefix.
-        @param prefix (string) Namespace prefix.
-        '''
+        :param str prefix: Namespace prefix.
+        """
         with self.cur('pfx:ns') as cur:
         with self.cur('pfx:ns') as cur:
             ns = cur.get(s2b(prefix))
             ns = cur.get(s2b(prefix))
             return Namespace(b2s(ns)) if ns is not None else None
             return Namespace(b2s(ns)) if ns is not None else None
 
 
 
 
     def prefix(self, namespace):
     def prefix(self, namespace):
-        '''
+        """
         Get the prefix associated with a namespace.
         Get the prefix associated with a namespace.
 
 
         @NOTE A namespace can be only bound to one prefix in this
         @NOTE A namespace can be only bound to one prefix in this
         implementation.
         implementation.
 
 
-        @param namespace (rdflib.URIRef) Fully qualified URI of namespace.
-        '''
+        :param rdflib.URIRef namespace: Fully qualified URI of namespace.
+        """
         with self.cur('ns:pfx') as cur:
         with self.cur('ns:pfx') as cur:
             prefix = cur.get(s2b(namespace))
             prefix = cur.get(s2b(namespace))
             return b2s(prefix) if prefix is not None else None
             return b2s(prefix) if prefix is not None else None
 
 
 
 
     def namespaces(self):
     def namespaces(self):
-        '''
-        Get an iterator of all prefix: namespace bindings.
-        '''
+        """Get an iterator of all prefix: namespace bindings."""
         with self.cur('pfx:ns') as cur:
         with self.cur('pfx:ns') as cur:
             for pfx, ns in iter(cur):
             for pfx, ns in iter(cur):
                 yield (b2s(pfx), Namespace(b2s(ns)))
                 yield (b2s(pfx), Namespace(b2s(ns)))
 
 
 
 
     def contexts(self, triple=None):
     def contexts(self, triple=None):
-        '''
+        """
         Get a list of all contexts.
         Get a list of all contexts.
 
 
         @return generator(Graph)
         @return generator(Graph)
-        '''
+        """
         if triple and any(triple):
         if triple and any(triple):
             with self.cur('spo:c') as cur:
             with self.cur('spo:c') as cur:
                 if cur.set_key(self._to_key(triple)):
                 if cur.set_key(self._to_key(triple)):
@@ -695,7 +686,7 @@ class LmdbStore(Store):
 
 
 
 
     def add_graph(self, graph):
     def add_graph(self, graph):
-        '''
+        """
         Add a graph to the database.
         Add a graph to the database.
 
 
         This creates an empty graph by associating the graph URI with the
         This creates an empty graph by associating the graph URI with the
@@ -707,8 +698,8 @@ class LmdbStore(Store):
         Therefore it needs to open a write transaction. This is not ideal
         Therefore it needs to open a write transaction. This is not ideal
         but the only way to handle datasets in RDFLib.
         but the only way to handle datasets in RDFLib.
 
 
-        @param graph (URIRef) URI of the named graph to add.
-        '''
+        :param URIRef graph: URI of the named graph to add.
+        """
         if isinstance(graph, Graph):
         if isinstance(graph, Graph):
             graph = graph.identifier
             graph = graph.identifier
         pk_c = self._pickle(graph)
         pk_c = self._pickle(graph)
@@ -738,11 +729,11 @@ class LmdbStore(Store):
 
 
 
 
     def remove_graph(self, graph):
     def remove_graph(self, graph):
-        '''
+        """
         Remove all triples from graph and the graph itself.
         Remove all triples from graph and the graph itself.
 
 
-        @param graph (URIRef) URI of the named graph to remove.
-        '''
+        :param URIRef graph: URI of the named graph to remove.
+        """
         if isinstance(graph, Graph):
         if isinstance(graph, Graph):
             graph = graph.identifier
             graph = graph.identifier
         self.remove((None, None, None), graph)
         self.remove((None, None, None), graph)
@@ -753,9 +744,7 @@ class LmdbStore(Store):
 
 
 
 
     def commit(self):
     def commit(self):
-        '''
-        Commit main transaction and push action queue.
-        '''
+        """Commit main transaction."""
         logger.debug('Committing transaction.')
         logger.debug('Committing transaction.')
         try:
         try:
             self.data_txn.commit()
             self.data_txn.commit()
@@ -769,9 +758,7 @@ class LmdbStore(Store):
 
 
 
 
     def rollback(self):
     def rollback(self):
-        '''
-        Roll back main transaction.
-        '''
+        """Roll back main transaction."""
         logger.debug('Rolling back transaction.')
         logger.debug('Rolling back transaction.')
         try:
         try:
             self.data_txn.abort()
             self.data_txn.abort()
@@ -787,16 +774,17 @@ class LmdbStore(Store):
     ## PRIVATE METHODS ##
     ## PRIVATE METHODS ##
 
 
     def _triple_keys(self, triple_pattern, context=None):
     def _triple_keys(self, triple_pattern, context=None):
-        '''
+        """
         Generator over matching triple keys.
         Generator over matching triple keys.
 
 
         This method is used by `triples` which returns native Python tuples,
         This method is used by `triples` which returns native Python tuples,
         as well as by other methods that need to iterate and filter triple
         as well as by other methods that need to iterate and filter triple
         keys without incurring in the overhead of converting them to triples.
         keys without incurring in the overhead of converting them to triples.
 
 
-        @param triple_pattern (tuple) 3 RDFLib terms
-        @param context (rdflib.Graph | None) Context graph or URI, or None.
-        '''
+        :param tuple triple_pattern: 3 RDFLib terms
+        :param context: Context graph or URI, or None.
+        :type context: rdflib.term.Identifier or None
+        """
         if context == self:
         if context == self:
             context = None
             context = None
 
 
@@ -842,16 +830,16 @@ class LmdbStore(Store):
 
 
 
 
     def _init_db_environments(self, create=True):
     def _init_db_environments(self, create=True):
-        '''
+        """
         Initialize the DB environment.
         Initialize the DB environment.
 
 
         The main database is kept in one file, the indices in a separate one
         The main database is kept in one file, the indices in a separate one
         (these may be even further split up depending on performance
         (these may be even further split up depending on performance
         considerations).
         considerations).
 
 
-        @param create (bool) If True, the environment and its databases are
+        :param bool create: If True, the environment and its databases are
         created.
         created.
-        '''
+        """
         path = self.path
         path = self.path
         if not exists(path):
         if not exists(path):
             if create is True:
             if create is True:
@@ -892,14 +880,15 @@ class LmdbStore(Store):
 
 
 
 
     def _from_key(self, key):
     def _from_key(self, key):
-        '''
+        """
         Convert a key into one or more terms.
         Convert a key into one or more terms.
 
 
-        @param key (bytes | memoryview) The key to be converted. It can be a
+        :param key: The key to be converted (bytes or memoryview). It can be a
         compound one in which case the function will return multiple terms.
         compound one in which case the function will return multiple terms.
 
 
         @return tuple
         @return tuple
-        '''
+        """
         with self.cur('t:st') as cur:
         with self.cur('t:st') as cur:
             return tuple(
             return tuple(
                    self._unpickle(cur.get(k))
                    self._unpickle(cur.get(k))
@@ -907,20 +896,21 @@ class LmdbStore(Store):
 
 
 
 
     def _to_key(self, obj):
     def _to_key(self, obj):
-        '''
+        """
         Convert a triple, quad or term into a key.
         Convert a triple, quad or term into a key.
 
 
         The key is the checksum of the pickled object, therefore unique for
         The key is the checksum of the pickled object, therefore unique for
         that object. The hashing algorithm is specified in `KEY_HASH_ALGO`.
         that object. The hashing algorithm is specified in `KEY_HASH_ALGO`.
 
 
-        @param obj (Object) Anything that can be reduced to terms stored in the
+        :param Object obj: Anything that can be reduced to terms stored in the
         database. Pairs of terms, as well as triples and quads, are expressed
         database. Pairs of terms, as well as triples and quads, are expressed
         as tuples.
         as tuples.
 
 
         If more than one term is provided, the keys are concatenated.
         If more than one term is provided, the keys are concatenated.
 
 
-        @return bytes
-        '''
+        :rtype: memoryview
+        :return: Keys stored for the term(s)
+        """
         if not isinstance(obj, list) and not isinstance(obj, tuple):
         if not isinstance(obj, list) and not isinstance(obj, tuple):
             obj = (obj,)
             obj = (obj,)
         key = []
         key = []
@@ -936,33 +926,33 @@ class LmdbStore(Store):
 
 
 
 
     def _hash(self, s):
     def _hash(self, s):
-        '''
-        Get the hash value of a serialized object.
-        '''
+        """Get the hash value of a serialized object."""
         return hashlib.new(self.KEY_HASH_ALGO, s).digest()
         return hashlib.new(self.KEY_HASH_ALGO, s).digest()
 
 
 
 
     def _split_key(self, keys):
     def _split_key(self, keys):
-        '''
+        """
         Split a compound key into individual keys.
         Split a compound key into individual keys.
 
 
         This method relies on the fixed length of all term keys.
         This method relies on the fixed length of all term keys.
 
 
-        @param keys (bytes | memoryview) Concatenated keys.
+        :param keys: Concatenated keys.
+        :type keys: bytes or memoryview
 
 
-        @return tuple: bytes | memoryview
-        '''
+        :rtype: tuple(memoryview)
+        """
         return tuple(
         return tuple(
                 keys[i:i+self.KEY_LENGTH]
                 keys[i:i+self.KEY_LENGTH]
                 for i in range(0, len(keys), self.KEY_LENGTH))
                 for i in range(0, len(keys), self.KEY_LENGTH))
 
 
 
 
     def _normalize_context(self, context):
     def _normalize_context(self, context):
-        '''
+        """
         Normalize a context parameter to conform to the model expectations.
         Normalize a context parameter to conform to the model expectations.
 
 
-        @param context (URIRef | Graph | None) Context URI or graph.
-        '''
+        :param context: Context URI or graph.
+        :type context: URIRef or Graph or None
+        """
         if isinstance(context, Graph):
         if isinstance(context, Graph):
             if context == self or isinstance(context.identifier, Variable):
             if context == self or isinstance(context.identifier, Variable):
                 context = None
                 context = None
@@ -974,11 +964,12 @@ class LmdbStore(Store):
 
 
 
 
     def _lookup(self, triple_pattern):
     def _lookup(self, triple_pattern):
-        '''
+        """
         Look up triples in the indices based on a triple pattern.
         Look up triples in the indices based on a triple pattern.
 
 
-        @return iterator of matching triple keys.
-        '''
+        :rtype: Iterator
+        :return: Matching triple keys.
+        """
         s, p, o = triple_pattern
         s, p, o = triple_pattern
 
 
         if s is not None:
         if s is not None:
@@ -1022,15 +1013,16 @@ class LmdbStore(Store):
 
 
 
 
     def _lookup_1bound(self, label, term):
     def _lookup_1bound(self, label, term):
-        '''
+        """
         Lookup triples for a pattern with one bound term.
         Lookup triples for a pattern with one bound term.
 
 
-        @param label (string) Which term is being searched for. One of `s`,
+        :param str label: Which term is being searched for. One of `s`,
         `p`, or `o`.
         `p`, or `o`.
-        @param term (rdflib.URIRef) Bound term to search for.
+        :param rdflib.URIRef term: Bound term to search for.
 
 
-        @return iterator(bytes) SPO keys matching the pattern.
-        '''
+        :rtype: iterator(bytes)
+        :return: SPO keys matching the pattern.
+        """
         k = self._to_key(term)
         k = self._to_key(term)
         if not k:
         if not k:
             return iter(())
             return iter(())
@@ -1051,15 +1043,16 @@ class LmdbStore(Store):
 
 
 
 
     def _lookup_2bound(self, bound_terms):
     def _lookup_2bound(self, bound_terms):
-        '''
+        """
         Look up triples for a pattern with two bound terms.
         Look up triples for a pattern with two bound terms.
 
 
-        @param bound terms (dict) Triple labels and terms to search for,
+        :param dict bound_terms: Triple labels and terms to search for,
         in the format of, e.g. {'s': URIRef('urn:s:1'), 'o':
         in the format of, e.g. {'s': URIRef('urn:s:1'), 'o':
         URIRef('urn:o:1')}
         URIRef('urn:o:1')}
 
 
-        @return iterator(bytes) SPO keys matching the pattern.
-        '''
+        :rtype: iterator(bytes)
+        :return: SPO keys matching the pattern.
+        """
         if len(bound_terms) != 2:
         if len(bound_terms) != 2:
             raise ValueError(
             raise ValueError(
                     'Exactly 2 terms need to be bound. Got {}'.format(
                     'Exactly 2 terms need to be bound. Got {}'.format(
@@ -1112,14 +1105,15 @@ class LmdbStore(Store):
 
 
 
 
     def _append(self, cur, values, **kwargs):
     def _append(self, cur, values, **kwargs):
-        '''
+        """
         Append one or more values to the end of a database.
         Append one or more values to the end of a database.
 
 
-        @param cur (lmdb.Cursor) The write cursor to act on.
-        @param data (list(bytes)) Value(s) to append.
+        :param lmdb.Cursor cur: The write cursor to act on.
+        :param list(bytes) values: Value(s) to append.
 
 
-        @return list(bytes) Last key(s) inserted.
-        '''
+        :rtype: list(memoryview)
+        :return: Last key(s) inserted.
+        """
         if not isinstance(values, list) and not isinstance(values, tuple):
         if not isinstance(values, list) and not isinstance(values, tuple):
             raise ValueError('Input must be a list or tuple.')
             raise ValueError('Input must be a list or tuple.')
         data = []
         data = []
@@ -1134,13 +1128,12 @@ class LmdbStore(Store):
 
 
 
 
     def _index_triple(self, action, spok):
     def _index_triple(self, action, spok):
-        '''
+        """
         Update index for a triple and context (add or remove).
         Update index for a triple and context (add or remove).
 
 
-        @param action (string) 'add' or 'remove'.
-        @param spok (bytes) Triple key.
-        indexed. Context MUST be specified for 'add'.
-        '''
+        :param str action: 'add' or 'remove'.
+        :param bytes spok: Triple key.
+        """
         # Split and rearrange-join keys for association and indices.
         # Split and rearrange-join keys for association and indices.
         triple = self._split_key(spok)
         triple = self._split_key(spok)
         sk, pk, ok = triple
         sk, pk, ok = triple
@@ -1173,13 +1166,14 @@ class LmdbStore(Store):
     ## debugging.
     ## debugging.
 
 
     def _keys_in_ctx(self, pk_ctx):
     def _keys_in_ctx(self, pk_ctx):
-        '''
+        """
         Convenience method to list all keys in a context.
         Convenience method to list all keys in a context.
 
 
-        @param pk_ctx (bytes) Pickled context URI.
+        :param bytes pk_ctx: Pickled context URI.
 
 
-        @return Iterator:tuple Generator of triples.
-        '''
+        :rtype: Iterator(tuple)
+        :return: Generator of triples.
+        """
         with self.cur('c:spo') as cur:
         with self.cur('c:spo') as cur:
             if cur.set_key(pk_ctx):
             if cur.set_key(pk_ctx):
                 tkeys = cur.iternext_dup()
                 tkeys = cur.iternext_dup()
@@ -1189,13 +1183,14 @@ class LmdbStore(Store):
 
 
 
 
     def _ctx_for_key(self, tkey):
     def _ctx_for_key(self, tkey):
-        '''
+        """
         Convenience method to list all contexts that a key is in.
         Convenience method to list all contexts that a key is in.
 
 
-        @param tkey (bytes) Triple key.
+        :param bytes tkey: Triple key.
 
 
-        @return Iterator:URIRef Generator of context URIs.
-        '''
+        :rtype: Iterator(rdflib.URIRef)
+        :return: Generator of context URIs.
+        """
         with self.cur('spo:c') as cur:
         with self.cur('spo:c') as cur:
             if cur.set_key(tkey):
             if cur.set_key(tkey):
                 ctx = cur.iternext_dup()
                 ctx = cur.iternext_dup()