Browse Source

Set webroot in app context

* Move code to a before_request hook
* Change references from Toolbox().base_url to g.webroot
* Rename `g` local variables to `gr` across the application to avoid a naming conflict with Flask's `g` context object (see the sketch below)
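A minimal sketch of the resulting pattern, assuming the Flask blueprint wiring shown in lakesuperior/endpoints/ldp.py below; the standalone uuid_to_uri helper is an illustrative simplification of Toolbox.uuid_to_uri, not a verbatim copy:

from flask import Blueprint, g, request
from rdflib.term import URIRef

ldp = Blueprint('ldp', __name__)

@ldp.url_value_preprocessor
def bp_url_value_preprocessor(endpoint, values):
    # The web root is computed once per request and stored on Flask's
    # request-bound `g` proxy instead of on a Toolbox instance.
    g.url_prefix = values.pop('url_prefix')
    g.webroot = request.host_url + g.url_prefix

def uuid_to_uri(uuid):
    # Helpers read the value back from `g`; local rdflib Graph variables are
    # named `gr` throughout so they never shadow flask.g.
    return URIRef('{}/{}'.format(g.webroot, uuid) if uuid else g.webroot)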
Stefano Cossu 7 years ago
parent
commit
a101b5bf84
5 changed files with 134 additions and 140 deletions
  1. lakesuperior/endpoints/ldp.py (+1 −0)
  2. lakesuperior/model/ldpr.py (+53 −53)
  3. lakesuperior/toolbox.py (+26 −34)
  4. tests/endpoints/test_ldp.py (+43 −43)
  5. tests/test_toolbox.py (+11 −10)

+ 1 - 0
lakesuperior/endpoints/ldp.py

@@ -75,6 +75,7 @@ def bp_url_defaults(endpoint, values):
 @ldp.url_value_preprocessor
 def bp_url_value_preprocessor(endpoint, values):
     g.url_prefix = values.pop('url_prefix')
+    g.webroot = request.host_url + g.url_prefix
 
 
 @ldp.before_request

+ 53 - 53
lakesuperior/model/ldpr.py

@@ -179,19 +179,19 @@ class Ldpr(metaclass=ABCMeta):
         elif __class__.is_rdf_parsable(mimetype):
             # Create container and populate it with provided RDF data.
             input_rdf = stream.read()
-            provided_g = Graph().parse(data=input_rdf,
+            provided_gr = Graph().parse(data=input_rdf,
                     format=mimetype, publicID=urn)
             logger.debug('Provided graph: {}'.format(
-                    pformat(set(provided_g))))
-            local_g = Toolbox().localize_graph(provided_g)
+                    pformat(set(provided_gr))))
+            local_gr = Toolbox().localize_graph(provided_gr)
             logger.debug('Parsed local graph: {}'.format(
-                    pformat(set(local_g))))
-            provided_imr = Resource(local_g, urn)
+                    pformat(set(local_gr))))
+            provided_imr = Resource(local_gr, urn)
 
             # Determine whether it is a basic, direct or indirect container.
-            if Ldpr.MBR_RSRC_URI in local_g.predicates() and \
-                    Ldpr.MBR_REL_URI in local_g.predicates():
-                if Ldpr.INS_CNT_REL_URI in local_g.predicates():
+            if Ldpr.MBR_RSRC_URI in local_gr.predicates() and \
+                    Ldpr.MBR_REL_URI in local_gr.predicates():
+                if Ldpr.INS_CNT_REL_URI in local_gr.predicates():
                     cls = LdpIc
                 else:
                     cls = LdpDc
@@ -379,12 +379,12 @@ class Ldpr(metaclass=ABCMeta):
                 self._logger.debug('Removing type: {}'.format(t))
                 self.imr.remove(RDF.type, t)
 
-        out_g = Toolbox().globalize_graph(self.imr.graph)
+        out_gr = Toolbox().globalize_graph(self.imr.graph)
         # Clear IMR because it's been pruned. In the rare case it is needed
         # after this method, it will be retrieved again.
         delattr(self, 'imr')
 
-        return out_g
+        return out_gr
 
 
     @property
@@ -632,14 +632,14 @@ class Ldpr(metaclass=ABCMeta):
                 remove_trp = set(remove_trp)
             if isinstance(add_trp, Graph):
                 add_trp = set(add_trp)
-            merge_g = remove_trp | add_trp
-            type = { trp[2] for trp in merge_g if trp[1] == RDF.type }
-            actor = { trp[2] for trp in merge_g \
+            merge_gr = remove_trp | add_trp
+            type = { trp[2] for trp in merge_gr if trp[1] == RDF.type }
+            actor = { trp[2] for trp in merge_gr \
                     if trp[1] == nsc['fcrepo'].createdBy }
         else:
-            merge_g = remove_trp | add_trp
-            type = merge_g[ self.urn : RDF.type ]
-            actor = merge_g[ self.urn : nsc['fcrepo'].createdBy ]
+            merge_gr = remove_trp | add_trp
+            type = merge_gr[ self.urn : RDF.type ]
+            actor = merge_gr[ self.urn : nsc['fcrepo'].createdBy ]
 
 
         return self.rdfly.modify_dataset(remove_trp, add_trp, metadata={
@@ -650,20 +650,20 @@ class Ldpr(metaclass=ABCMeta):
         })
 
 
-    def _ensure_single_subject_rdf(self, g):
+    def _ensure_single_subject_rdf(self, gr):
         '''
         Ensure that a RDF payload for a POST or PUT has a single resource.
         '''
-        for s in set(g.subjects()):
+        for s in set(gr.subjects()):
             if not s == self.urn:
                 raise SingleSubjectError(s, self.uuid)
 
 
     def _check_ref_int(self, config):
-        g = self.provided_imr.graph
+        gr = self.provided_imr.graph
 
-        for o in g.objects():
-            if isinstance(o, URIRef) and str(o).startswith(Toolbox().base_url)\
+        for o in gr.objects():
+            if isinstance(o, URIRef) and str(o).startswith(g.webroot)\
                     and not self.rdfly.ask_rsrc_exists(o):
                 if config == 'strict':
                     raise RefIntViolationError(o)
@@ -671,46 +671,46 @@ class Ldpr(metaclass=ABCMeta):
                     self._logger.info(
                             'Removing link to non-existent repo resource: {}'
                             .format(o))
-                    g.remove((None, None, o))
+                    gr.remove((None, None, o))
 
 
-    def _check_mgd_terms(self, g):
+    def _check_mgd_terms(self, gr):
         '''
         Check whether server-managed terms are in a RDF payload.
         '''
         if self.handling == 'none':
             return
 
-        offending_subjects = set(g.subjects()) & srv_mgd_subjects
+        offending_subjects = set(gr.subjects()) & srv_mgd_subjects
         if offending_subjects:
             if self.handling=='strict':
                 raise ServerManagedTermError(offending_subjects, 's')
             else:
                 for s in offending_subjects:
                     self._logger.info('Removing offending subj: {}'.format(s))
-                    g.remove((s, None, None))
+                    gr.remove((s, None, None))
 
-        offending_predicates = set(g.predicates()) & srv_mgd_predicates
+        offending_predicates = set(gr.predicates()) & srv_mgd_predicates
         if offending_predicates:
             if self.handling=='strict':
                 raise ServerManagedTermError(offending_predicates, 'p')
             else:
                 for p in offending_predicates:
                     self._logger.info('Removing offending pred: {}'.format(p))
-                    g.remove((None, p, None))
+                    gr.remove((None, p, None))
 
-        offending_types = set(g.objects(predicate=RDF.type)) & srv_mgd_types
+        offending_types = set(gr.objects(predicate=RDF.type)) & srv_mgd_types
         if offending_types:
             if self.handling=='strict':
                 raise ServerManagedTermError(offending_types, 't')
             else:
                 for t in offending_types:
                     self._logger.info('Removing offending type: {}'.format(t))
-                    g.remove((None, RDF.type, t))
+                    gr.remove((None, RDF.type, t))
 
-        self._logger.debug('Sanitized graph: {}'.format(g.serialize(
+        self._logger.debug('Sanitized graph: {}'.format(gr.serialize(
             format='turtle').decode('utf-8')))
-        return g
+        return gr
 
 
     def _sparql_delta(self, q):
@@ -736,22 +736,22 @@ class Ldpr(metaclass=ABCMeta):
         with `BaseStoreLayout.update_resource` and/or recorded as separate
         events in a provenance tracking system.
         '''
-        pre_g = self.imr.graph
+        pre_gr = self.imr.graph
 
-        post_g = deepcopy(pre_g)
-        post_g.update(q)
+        post_gr = deepcopy(pre_gr)
+        post_gr.update(q)
 
-        remove_g, add_g = self._dedup_deltas(pre_g, post_g)
+        remove_gr, add_gr = self._dedup_deltas(pre_gr, post_gr)
 
         #self._logger.info('Removing: {}'.format(
-        #    remove_g.serialize(format='turtle').decode('utf8')))
+        #    remove_gr.serialize(format='turtle').decode('utf8')))
         #self._logger.info('Adding: {}'.format(
-        #    add_g.serialize(format='turtle').decode('utf8')))
+        #    add_gr.serialize(format='turtle').decode('utf8')))
 
-        remove_g = self._check_mgd_terms(remove_g)
-        add_g = self._check_mgd_terms(add_g)
+        remove_gr = self._check_mgd_terms(remove_gr)
+        add_gr = self._check_mgd_terms(add_gr)
 
-        return remove_g, add_g
+        return remove_gr, add_gr
 
 
     def _add_srv_mgd_triples(self, create=False):
@@ -798,11 +798,11 @@ class Ldpr(metaclass=ABCMeta):
         else:
             parent_uri = self.ROOT_NODE_URN
 
-        add_g = Graph()
-        add_g.add((parent_uri, nsc['ldp'].contains, self.urn))
+        add_gr = Graph()
+        add_gr.add((parent_uri, nsc['ldp'].contains, self.urn))
         parent_rsrc = Ldpc(parent_uri, repr_opts={
                 'incl_children' : False}, handling='none')
-        parent_rsrc._modify_rsrc(self.RES_UPDATED, add_trp=add_g)
+        parent_rsrc._modify_rsrc(self.RES_UPDATED, add_trp=add_gr)
 
         # Direct or indirect container relationship.
         self._add_ldp_dc_ic_rel(parent_uri)
@@ -843,14 +843,14 @@ class Ldpr(metaclass=ABCMeta):
         return self.ROOT_NODE_URN
 
 
-    def _dedup_deltas(self, remove_g, add_g):
+    def _dedup_deltas(self, remove_gr, add_gr):
         '''
         Remove duplicate triples from add and remove delta graphs, which would
         otherwise contain unnecessary statements that annul each other.
         '''
         return (
-            remove_g - add_g,
-            add_g - remove_g
+            remove_gr - add_gr,
+            add_gr - remove_gr
         )
 
 
@@ -890,7 +890,7 @@ class Ldpr(metaclass=ABCMeta):
         cont_rsrc = Ldpr.outbound_inst(cont_uuid,
                 repr_opts={'incl_children' : False})
         cont_p = set(cont_rsrc.imr.graph.predicates())
-        add_g = Graph()
+        add_gr = Graph()
 
         self._logger.info('Checking direct or indirect containment.')
         self._logger.debug('Parent predicates: {}'.format(cont_p))
@@ -904,7 +904,7 @@ class Ldpr(metaclass=ABCMeta):
                 self._logger.info('Parent is a direct container.')
 
                 self._logger.debug('Creating DC triples.')
-                add_g.add((s, p, self.urn))
+                add_gr.add((s, p, self.urn))
 
             elif cont_rsrc.imr[RDF.type : nsc['ldp'].IndirectContainer] \
                    and self.INS_CNT_REL_URI in cont_p:
@@ -914,13 +914,13 @@ class Ldpr(metaclass=ABCMeta):
                 self._logger.debug('Target URI: {}'.format(target_uri))
                 if target_uri:
                     self._logger.debug('Creating IC triples.')
-                    add_g.add((s, p, target_uri))
+                    add_gr.add((s, p, target_uri))
 
-        if len(add_g):
-            add_g = self._check_mgd_terms(add_g)
+        if len(add_gr):
+            add_gr = self._check_mgd_terms(add_gr)
             self._logger.debug('Adding DC/IC triples: {}'.format(
-                add_g.serialize(format='turtle').decode('utf-8')))
-            self._modify_rsrc(self.RES_UPDATED, add_trp=add_g)
+                add_gr.serialize(format='turtle').decode('utf-8')))
+            self._modify_rsrc(self.RES_UPDATED, add_trp=add_gr)
 
 
     def _send_event_msg(self, remove_trp, add_trp, metadata):

+ 26 - 34
lakesuperior/toolbox.py

@@ -19,20 +19,12 @@ class Toolbox:
 
     ROOT_NODE_URN = nsc['fcsystem'].root
 
-    def __init__(self):
-        '''
-        Set the base URL for the requests. This class has to be instantiated
-        within a request context.
-       '''
-        self.base_url = request.host_url + g.url_prefix
-
-
     def uuid_to_uri(self, uuid):
         '''Convert a UUID to a URI.
 
         @return URIRef
         '''
-        uri = '{}/{}'.format(self.base_url, uuid) if uuid else self.base_url
+        uri = '{}/{}'.format(g.webroot, uuid) if uuid else g.webroot
 
         return URIRef(uri)
 
@@ -47,7 +39,7 @@ class Toolbox:
         elif uri.startswith(nsc['fcres']):
             return str(uri).replace(nsc['fcres'], '')
         else:
-            return str(uri).replace(self.base_url, '').strip('/')
+            return str(uri).replace(g.webroot, '').strip('/')
 
 
     def localize_string(self, s):
@@ -57,10 +49,10 @@ class Toolbox:
 
         @return string
         '''
-        if s.strip('/') == self.base_url:
+        if s.strip('/') == g.webroot:
             return str(self.ROOT_NODE_URN)
         else:
-            return s.strip('/').replace(self.base_url+'/', str(nsc['fcres']))
+            return s.strip('/').replace(g.webroot+'/', str(nsc['fcres']))
 
 
     def localize_term(self, uri):
@@ -74,7 +66,7 @@ class Toolbox:
         return URIRef(self.localize_string(str(uri)))
 
 
-    def localize_graph(self, g):
+    def localize_graph(self, gr):
         '''
         Locbalize a graph.
         '''
@@ -92,18 +84,18 @@ class Toolbox:
               STRSTARTS(str(?o), "{0}/")
             ) .
           }}
-        }}'''.format(self.base_url)
-        flt_g = g.query(q)
+        }}'''.format(g.webroot)
+        flt_gr = gr.query(q)
 
-        for t in flt_g:
+        for t in flt_gr:
             local_s = self.localize_term(t[0])
             local_o = self.localize_term(t[2]) \
                     if isinstance(t[2], URIRef) \
                     else t[2]
-            g.remove(t)
-            g.add((local_s, t[1], local_o))
+            gr.remove(t)
+            gr.add((local_s, t[1], local_o))
 
-        return g
+        return gr
 
 
     def globalize_string(self, s):
@@ -113,7 +105,7 @@ class Toolbox:
 
         @return string
         '''
-        return s.replace(str(nsc['fcres']), self.base_url + '/')
+        return s.replace(str(nsc['fcres']), g.webroot + '/')
 
 
     def globalize_term(self, urn):
@@ -130,7 +122,7 @@ class Toolbox:
         return URIRef(self.globalize_string(str(urn)))
 
 
-    def globalize_graph(self, g):
+    def globalize_graph(self, gr):
         '''
         Globalize a graph.
         '''
@@ -149,30 +141,30 @@ class Toolbox:
             ) .
           }}
         }}'''.format(nsc['fcres'], self.ROOT_NODE_URN)
-        flt_g = g.query(q)
+        flt_gr = gr.query(q)
 
-        for t in flt_g:
+        for t in flt_gr:
             global_s = self.globalize_term(t[0])
             global_o = self.globalize_term(t[2]) \
                     if isinstance(t[2], URIRef) \
                     else t[2]
-            g.remove(t)
-            g.add((global_s, t[1], global_o))
+            gr.remove(t)
+            gr.add((global_s, t[1], global_o))
 
-        return g
+        return gr
 
 
     def globalize_rsrc(self, rsrc):
         '''
         Globalize a resource.
         '''
-        g = rsrc.graph
+        gr = rsrc.graph
         urn = rsrc.identifier
 
-        global_g = self.globalize_graph(g)
+        global_gr = self.globalize_graph(gr)
         global_uri = self.globalize_term(urn)
 
-        return global_g.resource(global_uri)
+        return global_gr.resource(global_uri)
 
 
     def parse_rfc7240(self, h_str):
@@ -212,7 +204,7 @@ class Toolbox:
         return parsed_hdr
 
 
-    def rdf_cksum(self, g):
+    def rdf_cksum(self, gr):
         '''
         Generate a checksum for a graph.
 
@@ -227,16 +219,16 @@ class Toolbox:
 
         @TODO This can be later reworked to use a custom hashing algorithm.
 
-        @param rdflib.Graph g The graph to be hashed.
+        @param rdflib.Graph gr The graph to be hashed.
 
         @return string SHA1 checksum.
         '''
         # Remove the messageDigest property, which very likely reflects the
         # previous state of the resource.
-        g.remove((Variable('s'), nsc['premis'].messageDigest, Variable('o')))
+        gr.remove((Variable('s'), nsc['premis'].messageDigest, Variable('o')))
 
-        ord_g = sorted(list(g), key=lambda x : (x[0], x[1], x[2]))
-        hash = sha1(pickle.dumps(ord_g)).hexdigest()
+        ord_gr = sorted(list(gr), key=lambda x : (x[0], x[1], x[2]))
+        hash = sha1(pickle.dumps(ord_gr)).hexdigest()
 
         return hash
 

+ 43 - 43
tests/endpoints/test_ldp.py

@@ -3,7 +3,7 @@ import uuid
 
 from hashlib import sha1
 
-from flask import url_for
+from flask import url_for, g
 from rdflib import Graph
 from rdflib.namespace import RDF
 from rdflib.term import Literal, URIRef
@@ -44,7 +44,7 @@ class TestLdp:
         resp = self.client.put('/ldp/new_resource')
         assert resp.status_code == 201
         assert resp.data == bytes(
-                '{}/new_resource'.format(Toolbox().base_url), 'utf-8')
+                '{}/new_resource'.format(g.webroot), 'utf-8')
 
 
     def test_put_existing_resource(self, random_uuid):
@@ -96,16 +96,16 @@ class TestLdp:
 
         cont1_data = self.client.get('/ldp').data
         g1 = Graph().parse(data=cont1_data, format='turtle')
-        assert g1[ URIRef(Toolbox().base_url + '/') : nsc['ldp'].contains : \
-                URIRef(Toolbox().base_url + '/' + uuid1) ]
+        assert g1[ URIRef(g.webroot + '/') : nsc['ldp'].contains : \
+                URIRef(g.webroot + '/' + uuid1) ]
 
         self.client.put(path2)
 
         cont2_data = self.client.get(path1).data
         g1 = Graph().parse(data=cont2_data, format='turtle')
-        assert g1[ URIRef(Toolbox().base_url + '/' + uuid1) : \
+        assert g1[ URIRef(g.webroot + '/' + uuid1) : \
                 nsc['ldp'].contains : \
-                URIRef(Toolbox().base_url + '/' + uuid2) ]
+                URIRef(g.webroot + '/' + uuid2) ]
 
 
     def test_put_ldp_rs(self, client):
@@ -119,9 +119,9 @@ class TestLdp:
                 headers={'accept' : 'text/turtle'})
         assert resp.status_code == 200
 
-        g = Graph().parse(data=resp.data, format='text/turtle')
+        gr = Graph().parse(data=resp.data, format='text/turtle')
         assert URIRef('http://vocab.getty.edu/ontology#Subject') in \
-                g.objects(None, RDF.type)
+                gr.objects(None, RDF.type)
 
 
     def test_put_ldp_nr(self, rnd_img):
@@ -203,12 +203,12 @@ class TestLdp:
         slug01_resp = self.client.post('/ldp', headers={'slug' : 'slug01'})
         assert slug01_resp.status_code == 201
         assert slug01_resp.headers['location'] == \
-                Toolbox().base_url + '/slug01'
+                g.webroot + '/slug01'
 
         slug02_resp = self.client.post('/ldp', headers={'slug' : 'slug01'})
         assert slug02_resp.status_code == 201
         assert slug02_resp.headers['location'] != \
-                Toolbox().base_url + '/slug01'
+                g.webroot + '/slug01'
 
 
     def test_post_404(self):
@@ -237,7 +237,7 @@ class TestLdp:
         path = '/ldp/test_patch01'
         self.client.put(path)
 
-        uri = Toolbox().base_url + '/test_patch01'
+        uri = g.webroot + '/test_patch01'
 
         with open('tests/data/sparql_update/simple_insert.sparql') as data:
             resp = self.client.patch(path,
@@ -247,16 +247,16 @@ class TestLdp:
         assert resp.status_code == 204
 
         resp = self.client.get(path)
-        g = Graph().parse(data=resp.data, format='text/turtle')
-        assert g[ URIRef(uri) : nsc['dc'].title : Literal('Hello') ]
+        gr = Graph().parse(data=resp.data, format='text/turtle')
+        assert gr[ URIRef(uri) : nsc['dc'].title : Literal('Hello') ]
 
         self.client.patch(path,
                 data=open('tests/data/sparql_update/delete+insert+where.sparql'),
                 headers={'content-type' : 'application/sparql-update'})
 
         resp = self.client.get(path)
-        g = Graph().parse(data=resp.data, format='text/turtle')
-        assert g[ URIRef(uri) : nsc['dc'].title : Literal('Ciao') ]
+        gr = Graph().parse(data=resp.data, format='text/turtle')
+        assert gr[ URIRef(uri) : nsc['dc'].title : Literal('Ciao') ]
 
 
     def test_patch_ldp_nr_metadata(self):
@@ -274,9 +274,9 @@ class TestLdp:
         resp = self.client.get(path + '/fcr:metadata')
         assert resp.status_code == 200
 
-        uri = Toolbox().base_url + '/ldpnr01'
-        g = Graph().parse(data=resp.data, format='text/turtle')
-        assert g[ URIRef(uri) : nsc['dc'].title : Literal('Hello') ]
+        uri = g.webroot + '/ldpnr01'
+        gr = Graph().parse(data=resp.data, format='text/turtle')
+        assert gr[ URIRef(uri) : nsc['dc'].title : Literal('Hello') ]
 
         with open(
                 'tests/data/sparql_update/delete+insert+where.sparql') as data:
@@ -288,8 +288,8 @@ class TestLdp:
         resp = self.client.get(path + '/fcr:metadata')
         assert resp.status_code == 200
 
-        g = Graph().parse(data=resp.data, format='text/turtle')
-        assert g[ URIRef(uri) : nsc['dc'].title : Literal('Ciao') ]
+        gr = Graph().parse(data=resp.data, format='text/turtle')
+        assert gr[ URIRef(uri) : nsc['dc'].title : Literal('Ciao') ]
 
 
     def test_patch_ldp_nr(self, rnd_img):
@@ -325,7 +325,7 @@ class TestLdp:
         assert tstone_resp.status_code == 410
         assert tstone_resp.headers['Link'] == \
                 '<{}/test_delete01/fcr:tombstone>; rel="hasTombstone"'\
-                .format(Toolbox().base_url)
+                .format(g.webroot)
 
         tstone_path = '/ldp/test_delete01/fcr:tombstone'
         assert self.client.get(tstone_path).status_code == 405
@@ -350,7 +350,7 @@ class TestLdp:
         assert tstone_resp.status_code == 410
         assert tstone_resp.headers['Link'] == \
             '<{}/test_delete_recursive01/fcr:tombstone>; rel="hasTombstone"'\
-            .format(Toolbox().base_url)
+            .format(g.webroot)
 
         child_tstone_resp = self.client.get('/ldp/test_delete_recursive01/a')
         assert child_tstone_resp.status_code == tstone_resp.status_code
@@ -378,7 +378,7 @@ class TestPrefHeader:
         return {
             'path' : parent_path,
             'response' : self.client.get(parent_path),
-            'subject' : URIRef(Toolbox().base_url + '/test_parent')
+            'subject' : URIRef(g.webroot + '/test_parent')
         }
 
 
@@ -441,18 +441,18 @@ class TestPrefHeader:
 
         assert omit_embed_children_resp.data == cont_resp.data
 
-        incl_g = Graph().parse(
+        incl_gr = Graph().parse(
                 data=incl_embed_children_resp.data, format='turtle')
-        omit_g = Graph().parse(
+        omit_gr = Graph().parse(
                 data=omit_embed_children_resp.data, format='turtle')
 
-        children = set(incl_g[cont_subject : nsc['ldp'].contains])
+        children = set(incl_gr[cont_subject : nsc['ldp'].contains])
         assert len(children) == 3
 
-        children = set(incl_g[cont_subject : nsc['ldp'].contains])
+        children = set(incl_gr[cont_subject : nsc['ldp'].contains])
         for child_uri in children:
-            assert set(incl_g[ child_uri : : ])
-            assert not set(omit_g[ child_uri : : ])
+            assert set(incl_gr[ child_uri : : ])
+            assert not set(omit_gr[ child_uri : : ])
 
 
     def test_return_children(self, cont_structure):
@@ -474,12 +474,12 @@ class TestPrefHeader:
 
         assert incl_children_resp.data == cont_resp.data
 
-        incl_g = Graph().parse(data=incl_children_resp.data, format='turtle')
-        omit_g = Graph().parse(data=omit_children_resp.data, format='turtle')
+        incl_gr = Graph().parse(data=incl_children_resp.data, format='turtle')
+        omit_gr = Graph().parse(data=omit_children_resp.data, format='turtle')
 
-        children = incl_g[cont_subject : nsc['ldp'].contains]
+        children = incl_gr[cont_subject : nsc['ldp'].contains]
         for child_uri in children:
-            assert not omit_g[ cont_subject : nsc['ldp'].contains : child_uri ]
+            assert not omit_gr[ cont_subject : nsc['ldp'].contains : child_uri ]
 
 
     def test_inbound_rel(self, cont_structure):
@@ -501,11 +501,11 @@ class TestPrefHeader:
 
         assert omit_inbound_resp.data == cont_resp.data
 
-        incl_g = Graph().parse(data=incl_inbound_resp.data, format='turtle')
-        omit_g = Graph().parse(data=omit_inbound_resp.data, format='turtle')
+        incl_gr = Graph().parse(data=incl_inbound_resp.data, format='turtle')
+        omit_gr = Graph().parse(data=omit_inbound_resp.data, format='turtle')
 
-        assert set(incl_g[ : : cont_subject ])
-        assert not set(omit_g[ : : cont_subject ])
+        assert set(incl_gr[ : : cont_subject ])
+        assert not set(omit_gr[ : : cont_subject ])
 
 
     def test_srv_mgd_triples(self, cont_structure):
@@ -527,8 +527,8 @@ class TestPrefHeader:
 
         assert incl_srv_mgd_resp.data == cont_resp.data
 
-        incl_g = Graph().parse(data=incl_srv_mgd_resp.data, format='turtle')
-        omit_g = Graph().parse(data=omit_srv_mgd_resp.data, format='turtle')
+        incl_gr = Graph().parse(data=incl_srv_mgd_resp.data, format='turtle')
+        omit_gr = Graph().parse(data=omit_srv_mgd_resp.data, format='turtle')
 
         for pred in {
             nsc['fcrepo'].created,
@@ -537,16 +537,16 @@ class TestPrefHeader:
             nsc['fcrepo'].lastModifiedBy,
             nsc['ldp'].contains,
         }:
-            assert set(incl_g[ cont_subject : pred : ])
-            assert not set(omit_g[ cont_subject : pred : ])
+            assert set(incl_gr[ cont_subject : pred : ])
+            assert not set(omit_gr[ cont_subject : pred : ])
 
         for type in {
                 nsc['fcrepo'].Resource,
                 nsc['ldp'].Container,
                 nsc['ldp'].Resource,
         }:
-            assert incl_g[ cont_subject : RDF.type : type ]
-            assert not omit_g[ cont_subject : RDF.type : type ]
+            assert incl_gr[ cont_subject : RDF.type : type ]
+            assert not omit_gr[ cont_subject : RDF.type : type ]
 
 
     def test_delete_no_tstone(self):

+ 11 - 10
tests/test_toolbox.py

@@ -1,5 +1,6 @@
 import pytest
 
+from flask import g
 from rdflib.term import URIRef
 
 from lakesuperior.dictionaries.namespaces import ns_collection as nsc
@@ -27,15 +28,15 @@ class TestToolbox:
 
 
     def test_uuid_to_uri(self, tb):
-        assert tb.uuid_to_uri('1234') == URIRef(tb.base_url + '/1234')
-        assert tb.uuid_to_uri('') == URIRef(tb.base_url)
+        assert tb.uuid_to_uri('1234') == URIRef(g.webroot + '/1234')
+        assert tb.uuid_to_uri('') == URIRef(g.webroot)
 
 
     def test_uri_to_uuid(self, tb):
-        assert tb.uri_to_uuid(URIRef(tb.base_url) + '/test01') == 'test01'
-        assert tb.uri_to_uuid(URIRef(tb.base_url) + '/test01/test02') == \
+        assert tb.uri_to_uuid(URIRef(g.webroot) + '/test01') == 'test01'
+        assert tb.uri_to_uuid(URIRef(g.webroot) + '/test01/test02') == \
                 'test01/test02'
-        assert tb.uri_to_uuid(URIRef(tb.base_url)) == ''
+        assert tb.uri_to_uuid(URIRef(g.webroot)) == ''
         assert tb.uri_to_uuid(nsc['fcsystem'].root) == None
         assert tb.uri_to_uuid(nsc['fcres']['1234']) == '1234'
         assert tb.uri_to_uuid(nsc['fcres']['1234/5678']) == '1234/5678'
@@ -45,10 +46,10 @@ class TestToolbox:
         '''
         Test string localization.
         '''
-        assert tb.localize_string(tb.base_url + '/test/uid') == \
-                tb.localize_string(tb.base_url + '/test/uid/') == \
+        assert tb.localize_string(g.webroot + '/test/uid') == \
+                tb.localize_string(g.webroot + '/test/uid/') == \
                 str(nsc['fcres']['test/uid'])
-        assert tb.localize_string(tb.base_url) == str(nsc['fcsystem'].root)
+        assert tb.localize_string(g.webroot) == str(nsc['fcsystem'].root)
         assert tb.localize_string('http://bogus.org/test/uid') == \
                 'http://bogus.org/test/uid'
 
@@ -57,6 +58,6 @@ class TestToolbox:
         '''
         Test term localization.
         '''
-        assert tb.localize_term(tb.base_url + '/test/uid') == \
-                tb.localize_term(tb.base_url + '/test/uid/') == \
+        assert tb.localize_term(g.webroot + '/test/uid') == \
+                tb.localize_term(g.webroot + '/test/uid/') == \
                 nsc['fcres']['test/uid']