
Merge pull request #108 from scossu/gh_92_93

Issues 92, 93 & 102
Stefano Cossu 5 years ago
parent
commit
7c57b9584f

+ 1 - 1
lakesuperior/__init__.py

@@ -4,7 +4,7 @@ from os import path
 
 
 version = '1.0 alpha'
-release = '1.0.0a21'
+release = '1.0.0a22'
 
 basedir = path.dirname(path.realpath(__file__))
 """

+ 6 - 3
lakesuperior/api/resource.py

@@ -237,19 +237,22 @@ def create_or_replace(uid, **kwargs):
 
 
 @transaction(True)
-def update(uid, update_str, is_metadata=False):
+def update(uid, update_str, is_metadata=False, handling='strict'):
     """
     Update a resource with a SPARQL-Update string.
 
     :param string uid: Resource UID.
     :param string update_str: SPARQL-Update statements.
     :param bool is_metadata: Whether the resource metadata are being updated.
+    :param str handling: How to handle server-managed triples. ``strict``
+        (the default) rejects the update with an exception if server-managed
+        triples are being changed. ``lenient`` modifies the update graph so
+        offending triples are removed and the update can be applied.
 
     :raise InvalidResourceError: If ``is_metadata`` is False and the resource
         being updated is a LDP-NR.
     """
-    # FCREPO is lenient here and Hyrax requires it.
-    rsrc = LdpFactory.from_stored(uid, handling='lenient')
+    rsrc = LdpFactory.from_stored(uid, handling=handling)
     if LDP_NR_TYPE in rsrc.ldp_types and not is_metadata:
         raise InvalidResourceError(
                 'Cannot use this method to update an LDP-NR content.')
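
A minimal sketch of driving the new ``handling`` argument from the Python API (the resource UID and the update string are illustrative; the call signature matches the diff above):

    from lakesuperior.api import resource as rsrc_api
    from lakesuperior.exceptions import ServerManagedTermError

    uid = '/my_resource'  # hypothetical UID of an existing resource
    update_str = '''
    INSERT { <info:fcres/my_resource>
        <http://www.w3.org/ns/ldp#contains> <http://example.org/ext1> . }
    WHERE {}
    '''

    try:
        # Default ('strict'): changing server-managed triples raises an error.
        rsrc_api.update(uid, update_str)
    except ServerManagedTermError:
        # 'lenient': offending triples are stripped and the rest is applied.
        rsrc_api.update(uid, update_str, handling='lenient')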

+ 24 - 5
lakesuperior/endpoints/ldp.py

@@ -23,7 +23,8 @@ from lakesuperior.dictionaries.namespaces import ns_mgr as nsm
 from lakesuperior.exceptions import (
         ChecksumValidationError, ResourceNotExistsError, TombstoneError,
         ServerManagedTermError, InvalidResourceError, SingleSubjectError,
-        ResourceExistsError, IncompatibleLdpTypeError)
+        ResourceExistsError, IncompatibleLdpTypeError,
+        IndigestibleError)
 from lakesuperior.globals import RES_CREATED
 from lakesuperior.model.ldp.ldp_factory import LdpFactory
 from lakesuperior.model.ldp.ldp_nr import LdpNr
@@ -283,11 +284,22 @@ def post_resource(parent_uid):
         kwargs['mimetype'] = mimetype
         # Check digest if requested.
         if 'digest' in request.headers:
-            kwargs['prov_cksum_algo'], kwargs['prov_cksum'] = \
+            try:
+                kwargs['prov_cksum_algo'], kwargs['prov_cksum'] = (
                     request.headers['digest'].split('=')
+                )
+            except ValueError:
+                return (
+                    f'Cannot parse digest value: {request.headers["digest"]}',
+                    400
+                )
 
     try:
         rsrc = rsrc_api.create(parent_uid, slug, **kwargs)
+    except IndigestibleError:
+        return (
+            f'Unable to parse digest header: {request.headers["digest"]}'
+        ), 400
     except ResourceNotExistsError as e:
         return str(e), 404
     except (InvalidResourceError, ChecksumValidationError) as e:
@@ -295,6 +307,10 @@ def post_resource(parent_uid):
     except TombstoneError as e:
         return _tombstone_response(e, uid)
     except ServerManagedTermError as e:
+        rsp_headers['Link'] = (
+            f'<{uri}>; rel="{nsc["ldp"].constrainedBy}"; '
+            f'{g.webroot}/info/ldp_constraints"'
+        )
         return str(e), 412
 
     uri = g.tbox.uid_to_uri(rsrc.uid)
@@ -303,8 +319,9 @@ def post_resource(parent_uid):
     rsp_headers['Location'] = uri
 
     if mimetype and kwargs.get('rdf_fmt') is None:
-        rsp_headers['Link'] = (f'<{uri}/fcr:metadata>; rel="describedby"; '
-                               f'anchor="{uri}"')
+        rsp_headers['Link'] = (
+            f'<{uri}/fcr:metadata>; rel="describedby"; anchor="{uri}"'
+        )
 
     return uri, 201, rsp_headers
 
@@ -393,6 +410,8 @@ def patch_resource(uid, is_metadata=False):
     if cond_ret:
         return cond_ret
 
+    handling, _ = set_post_put_params()
+
     rsp_headers = {'Content-Type' : 'text/plain; charset=utf-8'}
     if request.mimetype != 'application/sparql-update':
         return 'Provided content type is not a valid parsable format: {}'\
@@ -401,7 +420,7 @@ def patch_resource(uid, is_metadata=False):
     update_str = request.get_data().decode('utf-8')
     local_update_str = g.tbox.localize_ext_str(update_str, nsc['fcres'][uid])
     try:
-        rsrc = rsrc_api.update(uid, local_update_str, is_metadata)
+        rsrc = rsrc_api.update(uid, local_update_str, is_metadata, handling)
     except (ServerManagedTermError, SingleSubjectError) as e:
         return str(e), 412
     except InvalidResourceError as e:
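
A short illustration of the Digest header format expected by the parsing added above, in the style of the existing endpoint tests (the ``self.client`` fixture and the target path are assumed). The value must be a single ``<algorithm>=<hex checksum>`` pair, so an unknown algorithm, or a value containing extra ``=`` characters (e.g. Base64 padding), now yields a 400 instead of an unhandled exception:

    import hashlib

    content = b'Some content.'  # illustrative payload
    cksum = hashlib.new('blake2b', content).hexdigest()

    # Well-formed value: parses and validates -> 201.
    assert self.client.post(
        '/ldp', data=content, headers={'digest': f'blake2b={cksum}'}
    ).status_code == 201

    # Unsupported algorithm -> IndigestibleError -> 400.
    assert self.client.post(
        '/ldp', data=content, headers={'digest': f'bogusmd={cksum}'}
    ).status_code == 400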

+ 10 - 7
lakesuperior/endpoints/main.py

@@ -2,9 +2,10 @@ import logging
 
 from os import path
 
-from flask import Blueprint, render_template
+from flask import Blueprint, jsonify, render_template
 
 from lakesuperior import release
+from lakesuperior.dictionaries import srv_mgd_terms as smt
 
 logger = logging.getLogger(__name__)
 
@@ -21,9 +22,11 @@ def index():
     return render_template('index.html', release=release)
 
 
-@main.route('/debug', methods=['GET'])
-def debug():
-    """Debug page."""
-    raise RuntimeError()
-
-
+@main.route('/info/ldp_constraints', methods=['GET'])
+def ldp_constraints():
+    """ LDP term constraints. """
+    return jsonify({
+        'srv_mgd_subjects': [*smt.srv_mgd_subjects],
+        'srv_mgd_predicates': [*smt.srv_mgd_predicates],
+        'srv_mgd_types': [*smt.srv_mgd_types],
+    })
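
The new endpoint simply serializes the three server-managed term sets from ``srv_mgd_terms`` as JSON lists. A minimal check in the style of the existing endpoint tests (``self.client`` fixture assumed; the actual term values depend on the dictionaries):

    rsp = self.client.get('/info/ldp_constraints')
    assert rsp.status_code == 200

    data = rsp.get_json()
    assert {'srv_mgd_subjects', 'srv_mgd_predicates', 'srv_mgd_types'} <= set(data)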

+ 1 - 7
lakesuperior/etc.defaults/application.yml

@@ -43,7 +43,7 @@ uuid:
     #
     #   ``blake2b`` is a strong, fast cryptographic alternative to SHA2/3:
     #   https://blake2.net/
-    algo: sha1
+    algo: blake2b
 
 ###
 # Data store configuration.
@@ -77,12 +77,6 @@ store:
         #   Changes to this parameter require a full migration.
         referential_integrity: lenient
 
-        ###
-        #   Split newly minted URIs into pairtrees.
-        #
-        #   This mimics Fedora4 behavior which segments an identifier on POST.
-        legacy_ptree_split: False
-
     ###
     #   The path used to persist LDP-NR (bitstreams).
     #

+ 8 - 2
lakesuperior/exceptions.py

@@ -39,11 +39,17 @@ class ChecksumValidationError(ResourceError):
 
 
     def __str__(self):
-        return self.msg or (f'Validation failed for resource {self.uid}. '
-                            f'Provided checksum: {self.prov_cksum}; '
+        return self.msg or (f'validation failed for resource {self.uid}. '
+                            f'provided checksum: {self.prov_cksum}; '
                             f'calculated checksum: {self.calc_cksum}')
 
 
+class IndigestibleError(ResourceError):
+    """
+    Raised when an unsupported digest algorithm is requested.
+    """
+    pass
+
 
 class ResourceNotExistsError(ResourceError):
     '''

+ 6 - 13
lakesuperior/model/ldp/ldp_factory.py

@@ -118,12 +118,10 @@ class LdpFactory:
                     'Binary stream must be provided if mimetype is specified.')
 
             # Determine whether it is a basic, direct or indirect container.
-            if provided_imr[ : Ldpr.MBR_RSRC_URI : ] and \
-                    provided_imr[ : Ldpr.MBR_REL_URI : ]:
-                if provided_imr[ : Ldpr.INS_CNT_REL_URI : ]:
-                    cls = LdpIc
-                else:
-                    cls = LdpDc
+            if provided_imr[nsc['rdf'].type] == nsc['ldp'].IndirectContainer:
+                cls = LdpIc
+            elif provided_imr[nsc['rdf'].type] == nsc['ldp'].DirectContainer:
+                cls = LdpDc
             else:
                 cls = Ldpc
 
@@ -173,16 +171,11 @@ class LdpFactory:
         :return: The confirmed resource UID. This may be different from
             what has been indicated.
         """
-        def split_if_legacy(uid):
-            if config['application']['store']['ldp_rs']['legacy_ptree_split']:
-                uid = tbox.split_uuid(uid)
-            return uid
-
         if path and path.startswith('/'):
             raise ValueError('Slug cannot start with a slash.')
         # Shortcut!
         if not path and parent_uid == '/':
-            return '/' + split_if_legacy(str(uuid4()))
+            return f'/{uuid4()}'
 
         if not parent_uid.startswith('/'):
             raise ValueError('Invalid parent UID: {}'.format(parent_uid))
@@ -198,6 +191,6 @@ class LdpFactory:
             if not rdfly.ask_rsrc_exists(cnd_uid):
                 return cnd_uid
 
-        return pfx + split_if_legacy(str(uuid4()))
+        return f'{pfx}{uuid4()}'
 
 

+ 21 - 14
lakesuperior/model/ldp/ldpr.py

@@ -31,6 +31,8 @@ from lakesuperior.model.rdf.graph import Graph
 from lakesuperior.store.ldp_rs.rsrc_centric_layout import VERS_CONT_LABEL
 from lakesuperior.util.toolbox import replace_term_domain
 
+DEF_MBR_REL_URI = nsc['ldp'].member
+DEF_INS_CNT_REL_URI = nsc['ldp'].memberSubject
 
 rdfly = env.app_globals.rdfly
 logger = logging.getLogger(__name__)
@@ -910,30 +912,35 @@ class Ldpr(metaclass=ABCMeta):
 
         :param rdflib.resource.Resouce cont_rsrc:  The container resource.
         """
-        cont_p = cont_rsrc.metadata.terms_by_type('p')
-
         logger.info('Checking direct or indirect containment.')
-        logger.debug('Parent predicates: {}'.format(cont_p))
 
         add_trp = {(self.uri, nsc['fcrepo'].hasParent, cont_rsrc.uri)}
 
-        if self.MBR_RSRC_URI in cont_p and self.MBR_REL_URI in cont_p:
+        if (
+            nsc['ldp'].DirectContainer in cont_rsrc.ldp_types
+            or nsc['ldp'].IndirectContainer in cont_rsrc.ldp_types
+        ):
             from lakesuperior.model.ldp.ldp_factory import LdpFactory
 
-            s = cont_rsrc.metadata.value(self.MBR_RSRC_URI)
-            p = cont_rsrc.metadata.value(self.MBR_REL_URI)
+            cont_p = cont_rsrc.metadata.terms_by_type('p')
+            logger.debug('Parent predicates: {}'.format(cont_p))
 
-            if nsc['ldp'].DirectContainer in cont_rsrc.ldp_types:
-                logger.info('Parent is a direct container.')
-                logger.debug('Creating DC triples.')
-                o = self.uri
+            s = cont_rsrc.metadata.value(self.MBR_RSRC_URI) or cont_rsrc.uri
+            p = cont_rsrc.metadata.value(self.MBR_REL_URI) or DEF_MBR_REL_URI
+            #import pdb; pdb.set_trace()
 
-            elif nsc['ldp'].IndirectContainer in cont_rsrc.ldp_types:
+            if nsc['ldp'].IndirectContainer in cont_rsrc.ldp_types:
                 logger.info('Parent is an indirect container.')
                 cont_rel_uri = cont_rsrc.metadata.value(self.INS_CNT_REL_URI)
-                o = self.provided_imr.value(cont_rel_uri)
-                logger.debug('Target URI: {}'.format(o))
-                logger.debug('Creating IC triples.')
+                o = (
+                    self.provided_imr.value(cont_rel_uri)
+                    or DEF_INS_CNT_REL_URI
+                )
+                logger.debug(f'Target URI: {o}')
+
+            else:
+                logger.info('Parent is a direct container.')
+                o = self.uri
 
             target_rsrc = LdpFactory.from_stored(rdfly.uri_to_uid(s))
             target_rsrc.modify(RES_UPDATED, add_trp={(s, p, o)})

+ 8 - 3
lakesuperior/store/ldp_nr/default_layout.py

@@ -7,7 +7,7 @@ from uuid import uuid4
 
 from lakesuperior import env
 from lakesuperior.store.ldp_nr.base_non_rdf_layout import BaseNonRdfLayout
-from lakesuperior.exceptions import ChecksumValidationError
+from lakesuperior.exceptions import ChecksumValidationError, IndigestibleError
 
 
 logger = logging.getLogger(__name__)
@@ -98,9 +98,14 @@ class DefaultLayout(BaseNonRdfLayout):
                 logger.debug(f'Writing temp file to {tmp_fname}.')
 
                 store_hash = hashlib.new(default_hash_algo)
-                verify_hash = (
+                try:
+                    verify_hash = (
                         store_hash if prov_cksum_algo == default_hash_algo
-                        else hashlib.new(prov_cksum_algo))
+                        else hashlib.new(prov_cksum_algo)
+                    )
+                except ValueError as e:
+                    raise IndigestibleError(uid, str(e))
+
                 size = 0
                 while True:
                     buf = stream.read(bufsize)
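
The new ``ValueError`` branch covers digest algorithms that hashlib does not know about; a quick illustration of the underlying stdlib behavior that ``IndigestibleError`` wraps (no Lakesuperior code involved):

    import hashlib

    hashlib.new('blake2b')      # supported algorithm: returns a hash object
    try:
        hashlib.new('bogusmd')  # unsupported algorithm name
    except ValueError as e:
        print(e)                # e.g. 'unsupported hash type bogusmd'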

+ 152 - 4
tests/2_api/test_2_0_resource_api.py

@@ -27,7 +27,8 @@ def dc_rdf():
     PREFIX dcterms: <http://purl.org/dc/terms/>
     PREFIX ldp: <http://www.w3.org/ns/ldp#>
 
-    <> dcterms:title "Direct Container" ;
+    <> a ldp:DirectContainer ;
+        dcterms:title "Direct Container" ;
         ldp:membershipResource <info:fcres/member> ;
         ldp:hasMemberRelation dcterms:relation .
     '''
@@ -40,7 +41,8 @@ def ic_rdf():
     PREFIX ldp: <http://www.w3.org/ns/ldp#>
     PREFIX ore: <http://www.openarchives.org/ore/terms/>
 
-    <> dcterms:title "Indirect Container" ;
+    <> a ldp:IndirectContainer ;
+        dcterms:title "Indirect Container" ;
         ldp:membershipResource <info:fcres/top_container> ;
         ldp:hasMemberRelation dcterms:relation ;
         ldp:insertedContentRelation ore:proxyFor .
@@ -343,7 +345,7 @@ class TestResourceCRUD:
         _, dc_rsrc = rsrc_api.create_or_replace(
                 dc_uid, rdf_data=dc_rdf, rdf_fmt='turtle')
 
-        child_uid = rsrc_api.create(dc_uid, None).uid
+        child_uid = rsrc_api.create(dc_uid).uid
         member_rsrc = rsrc_api.get('/member')
 
         with env.app_globals.rdf_store.txn_ctx():
@@ -351,9 +353,80 @@ class TestResourceCRUD:
                 member_rsrc.uri: nsc['dcterms'].relation: nsc['fcres'][child_uid]]
 
 
+    def test_create_ldp_dc_defaults1(self):
+        """
+        Create an LDP Direct Container with default values.
+        """
+        dc_rdf = b'''
+        PREFIX dcterms: <http://purl.org/dc/terms/>
+        PREFIX ldp: <http://www.w3.org/ns/ldp#>
+
+        <> a ldp:DirectContainer ;
+            ldp:membershipResource <info:fcres/member> .
+        '''
+        dc_uid = '/test_dc_defaults1'
+        _, dc_rsrc = rsrc_api.create_or_replace(
+                dc_uid, rdf_data=dc_rdf, rdf_fmt='turtle')
+
+        child_uid = rsrc_api.create(dc_uid).uid
+        member_rsrc = rsrc_api.get('/member')
+
+        with env.app_globals.rdf_store.txn_ctx():
+            assert member_rsrc.imr[
+                member_rsrc.uri: nsc['ldp'].member: nsc['fcres'][child_uid]
+            ]
+
+
+    def test_create_ldp_dc_defaults2(self):
+        """
+        Create an LDP Direct Container with default values.
+        """
+        dc_rdf = b'''
+        PREFIX dcterms: <http://purl.org/dc/terms/>
+        PREFIX ldp: <http://www.w3.org/ns/ldp#>
+
+        <> a ldp:DirectContainer ;
+            ldp:hasMemberRelation dcterms:relation .
+        '''
+        dc_uid = '/test_dc_defaults2'
+        _, dc_rsrc = rsrc_api.create_or_replace(
+                dc_uid, rdf_data=dc_rdf, rdf_fmt='turtle')
+
+        child_uid = rsrc_api.create(dc_uid).uid
+        member_rsrc = rsrc_api.get(dc_uid)
+
+        with env.app_globals.rdf_store.txn_ctx():
+            #import pdb; pdb.set_trace()
+            assert member_rsrc.imr[
+                member_rsrc.uri: nsc['dcterms'].relation:
+                nsc['fcres'][child_uid]]
+
+
+    def test_create_ldp_dc_defaults3(self):
+        """
+        Create an LDP Direct Container with default values.
+        """
+        dc_rdf = b'''
+        PREFIX dcterms: <http://purl.org/dc/terms/>
+        PREFIX ldp: <http://www.w3.org/ns/ldp#>
+
+        <> a ldp:DirectContainer .
+        '''
+        dc_uid = '/test_dc_defaults3'
+        _, dc_rsrc = rsrc_api.create_or_replace(
+                dc_uid, rdf_data=dc_rdf, rdf_fmt='turtle')
+
+        child_uid = rsrc_api.create(dc_uid, None).uid
+        member_rsrc = rsrc_api.get(dc_uid)
+
+        with env.app_globals.rdf_store.txn_ctx():
+            assert member_rsrc.imr[
+                member_rsrc.uri: nsc['ldp'].member: nsc['fcres'][child_uid]]
+
+
     def test_indirect_container(self, ic_rdf):
         """
-        Create an indirect container verify special properties.
+        Create an indirect container and verify special properties.
         """
         cont_uid = '/top_container'
         ic_uid = '{}/test_ic'.format(cont_uid)
@@ -383,6 +456,81 @@ class TestResourceCRUD:
                 nsc['fcres'][target_uid]]
 
 
+    # TODO WIP Complex test of all possible combinations of missing IC triples
+    # falling back to default values.
+    #def test_indirect_container_defaults(self):
+    #    """
+    #    Create an indirect container with various default values.
+    #    """
+    #    ic_rdf_base = b'''
+    #    PREFIX dcterms: <http://purl.org/dc/terms/>
+    #    PREFIX ldp: <http://www.w3.org/ns/ldp#>
+    #    PREFIX ore: <http://www.openarchives.org/ore/terms/>
+
+    #    <> a ldp:IndirectContainer ;
+    #    '''
+    #    ic_rdf_trp1 = '\nldp:membershipResource <info:fcres/top_container> ;'
+    #    ic_rdf_trp2 = '\nldp:hasMemberRelation dcterms:relation ;'
+    #    ic_rdf_trp3 = '\nldp:insertedContentRelation ore:proxyFor ;'
+
+    #    ic_def_rdf = [
+    #        ic_rdf_base + ic_rdf_trp1 + ic_trp2 + '\n.',
+    #        ic_rdf_base + ic_rdf_trp1 + ic_trp3 + '\n.',
+    #        ic_rdf_base + ic_rdf_trp2 + ic_trp3 + '\n.',
+    #        ic_rdf_base + ic_rdf_trp1 + '\n.',
+    #        ic_rdf_base + ic_rdf_trp2 + '\n.',
+    #        ic_rdf_base + ic_rdf_trp3 + '\n.',
+    #        ic_rdf_base + '\n.',
+    #    ]
+
+    #    target_uid = '/ic_target_def'
+    #    rsrc_api.create_or_replace(target_uid)
+
+    #    # Create several sets of indirect containers, each missing one or more
+    #    # triples from the original graph, which should be replaced by default
+    #    # values. All combinations are tried.
+    #    for i, ic_rdf in enumerate(ic_def_rdf):
+    #        cont_uid = f'/top_container_def{i}'
+    #        ic_uid = '{}/test_ic'.format(cont_uid)
+    #        member_uid = '{}/ic_member'.format(ic_uid)
+
+    #        rsrc_api.create_or_replace(cont_uid)
+    #        rsrc_api.create_or_replace(
+    #            ic_uid, rdf_data=ic_rdf, rdf_fmt='turtle'
+    #        )
+
+    #        ic_member_p = (
+    #            nsc['ore'].proxyFor if i in (1, 2, 5)
+    #            else nsc['ldp'].memberSubject
+    #        )
+    #        # WIP
+    #        #ic_member_o_uid = (
+    #        #    'ic_target_def' if i in (1, 2, 5)
+    #        #    else nsc['ldp'].memberSubject
+    #        #)
+
+    #        ic_member_rdf = b'''
+    #        PREFIX ore: <http://www.openarchives.org/ore/terms/>
+    #        <> ore:proxyFor <info:fcres/ic_target_def> .'''
+
+    #        rsrc_api.create_or_replace(
+    #                member_uid, rdf_data=ic_member_rdf, rdf_fmt='turtle')
+
+    #        ic_rsrc = rsrc_api.get(ic_uid)
+    #        with env.app_globals.rdf_store.txn_ctx():
+    #            assert nsc['ldp'].Container in ic_rsrc.ldp_types
+    #            assert nsc['ldp'].IndirectContainer in ic_rsrc.ldp_types
+
+    #    top_cont_rsrc = rsrc_api.get(cont_uid)
+
+    #    for i, ic_rdf in enumerate(ic_def_rdf):
+    #        member_rsrc = rsrc_api.get(member_uid)
+    #        with env.app_globals.rdf_store.txn_ctx():
+    #            assert top_cont_rsrc.imr[
+    #                top_cont_rsrc.uri: nsc['dcterms'].relation:
+    #                nsc['fcres'][target_uid]]
+
+
 
 @pytest.mark.usefixtures('db')
 class TestAdvancedDelete:

+ 103 - 1
tests/3_endpoints/test_3_0_ldp.py

@@ -416,7 +416,7 @@ class TestLdp:
         assert gr[ URIRef(uri) : nsc['dc'].title : Literal('Ciao') ]
 
 
-    def test_patch_ssr(self):
+    def test_patch_no_single_subject(self):
         """
         Test patching a resource violating the single-subject rule.
         """
@@ -509,6 +509,99 @@ class TestLdp:
         assert ldprs_resp.status_code == ldpnr_resp.status_code == 415
 
 
+    def test_patch_srv_mgd_pred(self, rnd_img):
+        """
+        Verify that adding or removing a server-managed predicate fails.
+        """
+        uid = '/test_patch_sm_pred'
+        path = f'/ldp{uid}'
+        self.client.put(path)
+        self.client.put(path + '/child1')
+
+        uri = g.webroot + uid
+
+        ins_qry1 = f'INSERT {{ <> <{nsc["ldp"].contains}> <http://bogus.com/ext1> . }} WHERE {{}}'
+        ins_qry2 = (
+            f'INSERT {{ <> <{nsc["fcrepo"].created}>'
+            f'"2019-04-01T05:57:36.899033+00:00"^^<{nsc["xsd"].dateTime}> . }}'
+            'WHERE {}'
+        )
+        # The following won't change the graph so it does not raise an error.
+        ins_qry3 = f'INSERT {{ <> a <{nsc["ldp"].Container}> . }} WHERE {{}}'
+        del_qry1 = (
+            f'DELETE {{ <> <{nsc["ldp"].contains}> ?o . }} '
+            f'WHERE {{ <> <{nsc["ldp"].contains}> ?o . }}'
+        )
+        del_qry2 = f'DELETE {{ <> a <{nsc["ldp"].Container}> . }} WHERE {{}}'
+        # No-op as ins_qry3
+        del_qry3 = (
+            f'DELETE {{ <> a <{nsc["ldp"].DirectContainer}> .}} '
+            'WHERE {}'
+        )
+
+        assert self.client.patch(
+            path, data=ins_qry1,
+            headers={'content-type': 'application/sparql-update'}
+        ).status_code == 412
+
+        assert self.client.patch(
+            path, data=ins_qry1,
+            headers={
+                'content-type': 'application/sparql-update',
+                'prefer': 'handling=lenient',
+            }
+        ).status_code == 204
+
+        assert self.client.patch(
+            path, data=ins_qry2,
+            headers={'content-type': 'application/sparql-update'}
+        ).status_code == 412
+
+        assert self.client.patch(
+            path, data=ins_qry2,
+            headers={
+                'content-type': 'application/sparql-update',
+                'prefer': 'handling=lenient',
+            }
+        ).status_code == 204
+
+        assert self.client.patch(
+            path, data=ins_qry3,
+            headers={'content-type': 'application/sparql-update'}
+        ).status_code == 204
+
+        assert self.client.patch(
+            path, data=del_qry1,
+            headers={'content-type': 'application/sparql-update'}
+        ).status_code == 412
+
+        assert self.client.patch(
+            path, data=del_qry1,
+            headers={
+                'content-type': 'application/sparql-update',
+                'prefer': 'handling=lenient',
+            }
+        ).status_code == 204
+
+        assert self.client.patch(
+            path, data=del_qry2,
+            headers={'content-type': 'application/sparql-update'}
+        ).status_code == 412
+
+        assert self.client.patch(
+            path, data=del_qry2,
+            headers={
+                'content-type': 'application/sparql-update',
+                'prefer': 'handling=lenient',
+            }
+        ).status_code == 204
+
+        assert self.client.patch(
+            path, data=del_qry3,
+            headers={'content-type': 'application/sparql-update'}
+        ).status_code == 204
+
+
     def test_delete(self):
         """
         Test delete response codes.
@@ -877,6 +970,15 @@ class TestDigestHeaders:
         assert self.client.post(path, data=content, headers={
                 'digest': f'blake2b={content_blake2b}'}).status_code == 201
 
+        assert self.client.post(path, data=content, headers={
+                'digest': f'bogusmd={content_blake2b}'}).status_code == 400
+
+        bencoded = b64encode(content_blake2b.encode())
+        assert self.client.post(
+            path, data=content,
+            headers={'digest': f'blake2b={bencoded}'}
+        ).status_code == 400
+
 
 
 @pytest.mark.usefixtures('client_class')