
Put all loggers at the module level.

Stefano Cossu, 7 years ago
commit 89dd96280c
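
Every file in this commit applies the same refactoring: drop the per-class `_logger` attribute and bind a single module-level `logger` right after the imports. A minimal before/after sketch of the pattern, with the method body abridged from `ldp_factory.py` (not the full class):

    import logging

    # Before: the logger lived on the class; static methods had to
    # reach it through the implicit __class__ cell, instance methods
    # through self._logger.
    class LdpFactory:
        _logger = logging.getLogger(__name__)

        @staticmethod
        def from_stored(uid):
            __class__._logger.info('Retrieving stored resource: {}'.format(uid))

    # After: one logger per module, shared by every class and function
    # in it. (Redefining the class here just illustrates the rewrite.)
    logger = logging.getLogger(__name__)

    class LdpFactory:
        @staticmethod
        def from_stored(uid):
            logger.info('Retrieving stored resource: {}'.format(uid))

Besides removing the `__class__._logger` indirection in static methods, this is the idiom the Python logging documentation recommends: `logging.getLogger(__name__)` at module scope yields one named logger per module, configurable centrally through the logging hierarchy.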

+ 1 - 1
doc/notes/TODO

@@ -90,7 +90,7 @@
   - [D] LDP
   - [D] Admin
   - [D] Query
-- [ ] Align logger variable
+- [D] Align logger variable
 - [ ] CLI prototype
 - [ ] Update documentation
 

+ 5 - 9
lakesuperior/model/ldp_factory.py

@@ -20,6 +20,7 @@ LDP_NR_TYPE = nsc['ldp'].NonRDFSource
 LDP_RS_TYPE = nsc['ldp'].RDFSource
 
 rdfly = env.app_globals.rdfly
+logger = logging.getLogger(__name__)
 
 
 class LdpFactory:
@@ -27,9 +28,6 @@ class LdpFactory:
     Generate LDP instances.
     The instance classes are based on provided client data or on stored data.
     '''
-    _logger = logging.getLogger(__name__)
-
-
     @staticmethod
     def new_container(uid):
         if not uid:
@@ -55,19 +53,19 @@ class LdpFactory:
 
         @param uid UID of the instance.
         '''
-        #__class__._logger.info('Retrieving stored resource: {}'.format(uid))
+        #logger.info('Retrieving stored resource: {}'.format(uid))
         imr_urn = nsc['fcres'][uid]
 
         rsrc_meta = rdfly.get_metadata(uid)
-        #__class__._logger.debug('Extracted metadata: {}'.format(
+        #logger.debug('Extracted metadata: {}'.format(
         #        pformat(set(rsrc_meta.graph))))
         rdf_types = set(rsrc_meta.graph[imr_urn : RDF.type])
 
         if LDP_NR_TYPE in rdf_types:
-            __class__._logger.info('Resource is a LDP-NR.')
+            logger.info('Resource is a LDP-NR.')
             rsrc = model.ldp_nr.LdpNr(uid, repr_opts, **kwargs)
         elif LDP_RS_TYPE in rdf_types:
-            __class__._logger.info('Resource is a LDP-RS.')
+            logger.info('Resource is a LDP-RS.')
             rsrc = model.ldp_rs.LdpRs(uid, repr_opts, **kwargs)
         else:
             raise ResourceNotExistsError(uid)
@@ -91,8 +89,6 @@ class LdpFactory:
         '''
         uri = nsc['fcres'][uid]
 
-        logger = __class__._logger
-
         if not stream:
             # Create empty LDPC.
             logger.info('No data received in request. '

+ 5 - 3
lakesuperior/model/ldp_nr.py

@@ -1,3 +1,4 @@
+import logging
 import pdb
 
 from rdflib import Graph
@@ -12,6 +13,7 @@ from lakesuperior.model.ldp_rs import LdpRs
 
 
 nonrdfly = env.app_globals.nonrdfly
+logger = logging.getLogger(__name__)
 
 
 class LdpNr(Ldpr):
@@ -74,7 +76,7 @@ class LdpNr(Ldpr):
         self.digest, self.size = nonrdfly.persist(self.stream)
 
         # Try to persist metadata. If it fails, delete the file.
-        self._logger.debug('Persisting LDP-NR triples in {}'.format(self.uri))
+        logger.debug('Persisting LDP-NR triples in {}'.format(self.uri))
         try:
             ev_type = super().create_or_replace_rsrc(create_only)
         except:
@@ -110,7 +112,7 @@ class LdpNr(Ldpr):
         super()._add_srv_mgd_triples(create)
 
         # File size.
-        self._logger.debug('Data stream size: {}'.format(self.size))
+        logger.debug('Data stream size: {}'.format(self.size))
         self.provided_imr.set(nsc['premis'].hasSize, Literal(self.size))
 
         # Checksum.
@@ -122,7 +124,7 @@ class LdpNr(Ldpr):
                 Literal(self.mimetype))
 
         # File name.
-        self._logger.debug('Disposition: {}'.format(self.disposition))
+        logger.debug('Disposition: {}'.format(self.disposition))
         try:
             self.provided_imr.set(nsc['ebucore']['filename'], Literal(
                     self.disposition['attachment']['parameters']['filename']))

+ 9 - 3
lakesuperior/model/ldp_rs.py

@@ -1,3 +1,5 @@
+import logging
+
 from rdflib import Graph
 
 from lakesuperior.env import env
@@ -5,6 +7,10 @@ from lakesuperior.globals import RES_UPDATED
 from lakesuperior.dictionaries.namespaces import ns_collection as nsc
 from lakesuperior.model.ldpr import Ldpr
 
+
+logger = logging.getLogger(__name__)
+
+
 class LdpRs(Ldpr):
     '''LDP-RS (LDP RDF source).
 
@@ -71,7 +77,7 @@ class LdpRs(Ldpr):
     #    with `BaseStoreLayout.update_resource` and/or recorded as separate
     #    events in a provenance tracking system.
     #    '''
-    #    self._logger.debug('Provided SPARQL query: {}'.format(q))
+    #    logger.debug('Provided SPARQL query: {}'.format(q))
     #    pre_gr = self.imr.graph
 
     #    post_gr = pre_gr | Graph()
@@ -79,9 +85,9 @@ class LdpRs(Ldpr):
 
     #    remove_gr, add_gr = self._dedup_deltas(pre_gr, post_gr)
 
-    #    #self._logger.debug('Removing: {}'.format(
+    #    #logger.debug('Removing: {}'.format(
     #    #    remove_gr.serialize(format='turtle').decode('utf8')))
-    #    #self._logger.debug('Adding: {}'.format(
+    #    #logger.debug('Adding: {}'.format(
     #    #    add_gr.serialize(format='turtle').decode('utf8')))
 
     #    remove_gr = self._check_mgd_terms(remove_gr)

+ 21 - 22
lakesuperior/model/ldpr.py

@@ -29,6 +29,7 @@ from lakesuperior.toolbox import Toolbox
 
 
 rdfly = env.app_globals.rdfly
+logger = logging.getLogger(__name__)
 
 
 class Ldpr(metaclass=ABCMeta):
@@ -96,8 +97,6 @@ class Ldpr(metaclass=ABCMeta):
         nsc['ldp'].IndirectContainer,
     }
 
-    _logger = logging.getLogger(__name__)
-
 
     ## MAGIC METHODS ##
 
@@ -150,10 +149,10 @@ class Ldpr(metaclass=ABCMeta):
         '''
         if not hasattr(self, '_imr'):
             if hasattr(self, '_imr_options'):
-                self._logger.debug(
+                logger.debug(
                     'Getting RDF representation for resource /{}'
                     .format(self.uid))
-                #self._logger.debug('IMR options:{}'.format(self._imr_options))
+                #logger.debug('IMR options:{}'.format(self._imr_options))
                 imr_options = self._imr_options
             else:
                 imr_options = {}
@@ -193,10 +192,10 @@ class Ldpr(metaclass=ABCMeta):
         '''
         if not hasattr(self, '_metadata'):
             if hasattr(self, '_imr'):
-                self._logger.info('Metadata is IMR.')
+                logger.info('Metadata is IMR.')
                 self._metadata = self._imr
             else:
-                self._logger.info(
+                logger.info(
                     'Getting metadata for resource /{}'.format(self.uid))
                 self._metadata = rdfly.get_metadata(self.uid)
 
@@ -225,7 +224,7 @@ class Ldpr(metaclass=ABCMeta):
         '''
         if not hasattr(self, '_imr'):
             if hasattr(self, '_imr_options'):
-                #self._logger.debug('IMR options:{}'.format(self._imr_options))
+                #logger.debug('IMR options:{}'.format(self._imr_options))
                 imr_options = self._imr_options
             else:
                 imr_options = {}
@@ -410,7 +409,7 @@ class Ldpr(metaclass=ABCMeta):
         to the tombstone of the resource that used to contain the deleted
         resource. Otherwise the deleted resource becomes a tombstone.
         '''
-        self._logger.info('Burying resource {}'.format(self.uid))
+        logger.info('Burying resource {}'.format(self.uid))
         # Create a backup snapshot for resurrection purposes.
         self.create_rsrc_snapshot(uuid4())
 
@@ -444,7 +443,7 @@ class Ldpr(metaclass=ABCMeta):
         '''
         Remove all traces of a resource and versions.
         '''
-        self._logger.info('Purging resource {}'.format(self.uid))
+        logger.info('Purging resource {}'.format(self.uid))
         refint = env.config['store']['ldp_rs']['referential_integrity']
         inbound = True if refint else inbound
         rdfly.forget_rsrc(self.uid, inbound)
@@ -458,7 +457,7 @@ class Ldpr(metaclass=ABCMeta):
         Perform version creation and return the version UID.
         '''
         # Create version resource from copying the current state.
-        self._logger.info(
+        logger.info(
             'Creating version snapshot {} for resource {}.'.format(
                 ver_uid, self.uid))
         ver_add_gr = set()
@@ -647,7 +646,7 @@ class Ldpr(metaclass=ABCMeta):
                     if config == 'strict':
                         raise RefIntViolationError(obj_uid)
                     else:
-                        self._logger.info(
+                        logger.info(
                             'Removing link to non-existent repo resource: {}'
                             .format(obj_uid))
                         gr.remove((None, None, o))
@@ -665,7 +664,7 @@ class Ldpr(metaclass=ABCMeta):
                 raise ServerManagedTermError(offending_subjects, 's')
             else:
                 for s in offending_subjects:
-                    self._logger.info('Removing offending subj: {}'.format(s))
+                    logger.info('Removing offending subj: {}'.format(s))
                     gr.remove((s, None, None))
 
         offending_predicates = set(gr.predicates()) & srv_mgd_predicates
@@ -675,7 +674,7 @@ class Ldpr(metaclass=ABCMeta):
                 raise ServerManagedTermError(offending_predicates, 'p')
             else:
                 for p in offending_predicates:
-                    self._logger.info('Removing offending pred: {}'.format(p))
+                    logger.info('Removing offending pred: {}'.format(p))
                     gr.remove((None, p, None))
 
         offending_types = set(gr.objects(predicate=RDF.type)) & srv_mgd_types
@@ -686,10 +685,10 @@ class Ldpr(metaclass=ABCMeta):
                 raise ServerManagedTermError(offending_types, 't')
             else:
                 for t in offending_types:
-                    self._logger.info('Removing offending type: {}'.format(t))
+                    logger.info('Removing offending type: {}'.format(t))
                     gr.remove((None, RDF.type, t))
 
-        #self._logger.debug('Sanitized graph: {}'.format(gr.serialize(
+        #logger.debug('Sanitized graph: {}'.format(gr.serialize(
         #    format='turtle').decode('utf-8')))
         return gr
 
@@ -792,8 +791,8 @@ class Ldpr(metaclass=ABCMeta):
         '''
         cont_p = set(cont_rsrc.metadata.graph.predicates())
 
-        self._logger.info('Checking direct or indirect containment.')
-        self._logger.debug('Parent predicates: {}'.format(cont_p))
+        logger.info('Checking direct or indirect containment.')
+        logger.debug('Parent predicates: {}'.format(cont_p))
 
         add_trp = {(self.uri, nsc['fcrepo'].hasParent, cont_rsrc.uri)}
 
@@ -802,20 +801,20 @@ class Ldpr(metaclass=ABCMeta):
             p = cont_rsrc.metadata.value(self.MBR_REL_URI).identifier
 
             if cont_rsrc.metadata[RDF.type: nsc['ldp'].DirectContainer]:
-                self._logger.info('Parent is a direct container.')
+                logger.info('Parent is a direct container.')
 
-                self._logger.debug('Creating DC triples.')
+                logger.debug('Creating DC triples.')
                 o = self.uri
 
             elif (
                     cont_rsrc.metadata[RDF.type: nsc['ldp'].IndirectContainer]
                     and self.INS_CNT_REL_URI in cont_p):
-                self._logger.info('Parent is an indirect container.')
+                logger.info('Parent is an indirect container.')
                 cont_rel_uri = cont_rsrc.metadata.value(
                     self.INS_CNT_REL_URI).identifier
                 o = self.provided_imr.value(cont_rel_uri).identifier
-                self._logger.debug('Target URI: {}'.format(o))
-                self._logger.debug('Creating IC triples.')
+                logger.debug('Target URI: {}'.format(o))
+                logger.debug('Creating IC triples.')
 
             target_rsrc = LdpFactory.from_stored(rdfly.uri_to_uid(s))
             target_rsrc._modify_rsrc(RES_UPDATED, add_trp={(s, p, o)})

+ 3 - 3
lakesuperior/store/ldp_nr/base_non_rdf_layout.py

@@ -3,6 +3,9 @@ import logging
 from abc import ABCMeta, abstractmethod
 
 
+logger = logging.getLogger(__name__)
+
+
 class BaseNonRdfLayout(metaclass=ABCMeta):
     '''
     Abstract class for setting the non-RDF (bitstream) store layout.
@@ -12,9 +15,6 @@ class BaseNonRdfLayout(metaclass=ABCMeta):
     traditional filesystem—e.g. a layout persisting to HDFS can be written too.
     '''
 
-    _logger = logging.getLogger(__name__)
-
-
     def __init__(self, config):
         '''
         Initialize the base non-RDF store layout.

+ 11 - 6
lakesuperior/store/ldp_nr/default_layout.py

@@ -1,3 +1,4 @@
+import logging
 import os
 import shutil
 
@@ -6,6 +7,10 @@ from uuid import uuid4
 
 from lakesuperior.store.ldp_nr.base_non_rdf_layout import BaseNonRdfLayout
 
+
+logger = logging.getLogger(__name__)
+
+
 class DefaultLayout(BaseNonRdfLayout):
     '''
     Default file layout.
@@ -39,7 +44,7 @@ class DefaultLayout(BaseNonRdfLayout):
         tmp_file = '{}/tmp/{}'.format(self.root, uuid4())
         try:
             with open(tmp_file, 'wb') as f:
-                self._logger.debug('Writing temp file to {}.'.format(tmp_file))
+                logger.debug('Writing temp file to {}.'.format(tmp_file))
 
                 hash = sha1()
                 size = 0
@@ -51,22 +56,22 @@ class DefaultLayout(BaseNonRdfLayout):
                     f.write(buf)
                     size += len(buf)
         except:
-            self._logger.exception('File write failed on {}.'.format(tmp_file))
+            logger.exception('File write failed on {}.'.format(tmp_file))
             os.unlink(tmp_file)
             raise
         if size == 0:
-            self._logger.warn('Zero-file size received.')
+            logger.warn('Zero-file size received.')
 
         # Move temp file to final destination.
         uuid = hash.hexdigest()
         dst = self.local_path(uuid)
-        self._logger.debug('Saving file to disk: {}'.format(dst))
+        logger.debug('Saving file to disk: {}'.format(dst))
         if not os.access(os.path.dirname(dst), os.X_OK):
             os.makedirs(os.path.dirname(dst))
 
         # If the file exists already, don't bother rewriting it.
         if os.path.exists(dst):
-            self._logger.info(
+            logger.info(
                     'File exists on {}. Not overwriting.'.format(dst))
             os.unlink(tmp_file)
         else:
@@ -92,7 +97,7 @@ class DefaultLayout(BaseNonRdfLayout):
         @param uuid (string) The resource UUID. This corresponds to the content
         checksum.
         '''
-        self._logger.debug('Generating path from uuid: {}'.format(uuid))
+        logger.debug('Generating path from uuid: {}'.format(uuid))
         bl = self.config['pairtree_branch_length']
         bc = self.config['pairtree_branches']
         term = len(uuid) if bc==0 else min(bc*bl, len(uuid))

+ 9 - 10
lakesuperior/store/ldp_rs/rsrc_centric_layout.py

@@ -19,14 +19,15 @@ from lakesuperior.env import env
 from lakesuperior.store.ldp_rs.lmdb_store import TxnManager
 
 
-Lmdb = plugin.register('Lmdb', Store,
-        'lakesuperior.store.ldp_rs.lmdb_store', 'LmdbStore')
-
 META_GR_URI = nsc['fcsystem']['meta']
 HIST_GR_URI = nsc['fcsystem']['histmeta']
 PTREE_GR_URI = nsc['fcsystem']['pairtree']
 VERS_CONT_LABEL = 'fcr:versions'
 
+Lmdb = plugin.register('Lmdb', Store,
+        'lakesuperior.store.ldp_rs.lmdb_store', 'LmdbStore')
+logger = logging.getLogger(__name__)
+
 
 class RsrcCentricLayout:
     '''
@@ -50,8 +51,6 @@ class RsrcCentricLayout:
     look for
     `lakesuperior.store.rdf.simple_layout.SimpleLayout`.
     '''
-
-    _logger = logging.getLogger(__name__)
     _graph_uids = ('fcadmin', 'fcmain', 'fcstruct')
 
     # @TODO Move to a config file?
@@ -169,13 +168,13 @@ class RsrcCentricLayout:
         '''
         Delete all graphs and insert the basic triples.
         '''
-        self._logger.info('Deleting all data from the graph store.')
+        logger.info('Deleting all data from the graph store.')
         store = self.ds.store
         if getattr(store, 'is_txn_open', False):
             store.rollback()
         store.destroy(store.path)
 
-        self._logger.info('Initializing the graph store with system data.')
+        logger.info('Initializing the graph store with system data.')
         store.open()
         with TxnManager(store, True):
             with open('data/bootstrap/rsrc_centric_layout.sparql', 'r') as f:
@@ -245,7 +244,7 @@ class RsrcCentricLayout:
         if incl_inbound and len(gr):
             gr += self.get_inbound_rel(nsc['fcres'][uid])
 
-        #self._logger.debug('Found resource: {}'.format(
+        #logger.debug('Found resource: {}'.format(
         #        gr.serialize(format='turtle').decode('utf-8')))
 
         rsrc = Resource(gr, nsc['fcres'][uid])
@@ -387,7 +386,7 @@ class RsrcCentricLayout:
                 (nsc['fcmain'][uid], nsc['foaf'].primaryTopic,
                 nsc['fcres'][uid]))
         gr = self.ds.graph(nsc['fcmain'][uid])
-        self._logger.debug('Updating graph {} with statements: {}'.format(
+        logger.debug('Updating graph {} with statements: {}'.format(
             nsc['fcmain'][uid], qry))
 
         return gr.update(qry)
@@ -407,7 +406,7 @@ class RsrcCentricLayout:
 
         # remove children.
         if children:
-            self._logger.debug('Purging children for /{}'.format(uid))
+            logger.debug('Purging children for /{}'.format(uid))
             for rsrc_uri in self.get_descendants(uid, False):
                 self.forget_rsrc(uid_fn(rsrc_uri), inbound, False)
             # Remove structure graph.

+ 3 - 3
lakesuperior/toolbox.py

@@ -13,13 +13,13 @@ from lakesuperior.dictionaries.namespaces import ns_collection as nsc
 from lakesuperior.globals import ROOT_RSRC_URI
 
 
+logger = logging.getLogger(__name__)
+
+
 class Toolbox:
     '''
     Utility class to translate and generate strings and other objects.
     '''
-
-    _logger = logging.getLogger(__name__)
-
     def replace_term_domain(self, term, search, replace):
         '''
         Replace the domain of a term.