Stefano Cossu, 5 years ago
parent
commit
4bb0556b3f

+ 6 - 7
lakesuperior/model/base.pxd

@@ -16,14 +16,13 @@ cdef enum:
     TRP_KLEN = 3 * sizeof(Key)
     QUAD_KLEN = 4 * sizeof(Key)
 
-cdef bytes buffer_dump(Buffer* buf)
-
-# "NULL" key, a value that is never user-provided. Used to mark special
-# values (e.g. deleted records).
-cdef Key NULL_KEY = 0
+    # "NULL" key, a value that is never user-provided. Used to mark special
+    # values (e.g. deleted records).
+    NULL_KEY = 0
+    # Value of first key inserted in an empty term database.
+    FIRST_KEY = 1
 
-# Value of first key inserted in an empty term database.
-cdef Key FIRST_KEY = 1
+cdef bytes buffer_dump(Buffer* buf)
 
 # "NULL" triple, a value that is never user-provided. Used to mark special
 # values (e.g. deleted records).
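
For reference, after this change the affected region of base.pxd reads roughly as follows (reconstructed from the hunk above): NULL_KEY and FIRST_KEY become members of the anonymous key-size enum instead of module-level cdef variables, and the buffer_dump() declaration moves below the enum.

    cdef enum:
        TRP_KLEN = 3 * sizeof(Key)
        QUAD_KLEN = 4 * sizeof(Key)

        # "NULL" key, a value that is never user-provided. Used to mark special
        # values (e.g. deleted records).
        NULL_KEY = 0
        # Value of first key inserted in an empty term database.
        FIRST_KEY = 1

    cdef bytes buffer_dump(Buffer* buf)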

+ 2 - 1
lakesuperior/model/graph/graph.pxd

@@ -17,7 +17,8 @@ ctypedef void (*lookup_callback_fn_t)(
 cdef class Graph:
     cdef:
         lmdb_triplestore.LmdbTriplestore store
-        Keyset keys
+        public Keyset keys
+        public object uri
 
         cc.key_compare_ft term_cmp_fn
         cc.key_compare_ft trp_cmp_fn

+ 63 - 49
lakesuperior/model/graph/graph.pyx

@@ -49,7 +49,7 @@ cdef class Graph:
     """
 
     def __cinit__(
-        self, store=None, size_t ct=0, str uri=None, set data=set()
+        self, store=None, size_t capacity=0, uri=None, set data=set()
     ):
         """
         Initialize the graph, optionally from Python/RDFlib data.
@@ -59,59 +59,58 @@ cdef class Graph:
             this is the default application store
             (``env.app_globals.rdf_store``).
 
-        :param size_t ct: Initial number of allocated triples.
+        :param size_t capacity: Initial number of allocated triples.
 
         :param str uri: If specified, the graph becomes a named graph and can
             utilize the :py:meth:`value()` method and special slicing notation.
 
-        :param set data: If specified, ``ct`` is ignored and an initial key
+        :param set data: If specified, ``capacity`` is ignored and an initial key
             set is created from a set of 3-tuples of :py:class:``rdflib.Term``
             instances.
         """
+        self.uri = rdflib.URIRef(uri) if uri else None
+
+        self.store = store or env.app_globals.rdf_store
 
-        if not store:
-            store = env.app_globals.rdf_store
         # Initialize empty data set.
         if data:
             # Populate with provided Python set.
             self.keys = Keyset(len(data))
             self.add_triples(data)
         else:
-            self.keys = Keyset(ct)
+            self.keys = Keyset(capacity)
 
 
     ## PROPERTIES ##
 
-    @property
-    def uri(self):
-        """
-        Get resource identifier as a RDFLib URIRef.
+    property data:
+        def __get__(self):
+            """
+            Triple data as a Python/RDFlib set.
 
-        :rtype: rdflib.URIRef.
-        """
-        return rdflib.URIRef(self.id)
+            :rtype: set
+            """
+            cdef TripleKey spok
 
+            ret = set()
 
-    @property
-    def data(self):
-        """
-        Triple data as a Python/RDFlib set.
-
-        :rtype: set
-        """
-        cdef TripleKey spok
+            self.keys.seek()
+            while self.keys.get_next(&spok):
+                ret.add((
+                    self.store.from_key(spok[0]),
+                    self.store.from_key(spok[1]),
+                    self.store.from_key(spok[2])
+                ))
 
-        ret = set()
+            return ret
 
-        self.seek()
-        while self.keys.get_next(&spok):
-            ret.keys.add((
-                self.store.from_key(spok[0]),
-                self.store.from_key(spok[1]),
-                self.store.from_key(spok[2])
-            ))
 
-        return ret
+    property capacity:
+        def __get__(self):
+            """
+            Total capacity of the underlying Keyset, in number of triples.
+            """
+            return self.keys.capacity
 
 
     ## MAGIC METHODS ##
@@ -133,7 +132,7 @@ cdef class Graph:
         This includes the subject URI, number of triples contained and the
         memory address of the instance.
         """
-        id_repr = f' id={self.id},' if self.id else ''
+        id_repr = f' uri={self.uri},' if self.uri else ''
         return (
                 f'<{self.__class__.__name__} @0x{id(self):02x}{id_repr} '
             f'length={len(self)}>'
@@ -252,7 +251,7 @@ cdef class Graph:
         if isinstance(item, slice):
             s, p, o = item.start, item.stop, item.step
             return self._slice(s, p, o)
-        elif self.id and isinstance(item, rdflib.Node):
+        elif self.uri and isinstance(item, rdflib.Node):
             # If a Node is given, return all values for that predicate.
             return self._slice(self.uri, item, None)
         else:
@@ -261,7 +260,7 @@ cdef class Graph:
 
     def __hash__(self):
         """ TODO Bogus """
-        return self.id
+        return self.uri
 
 
     ## BASIC PYTHON-ACCESSIBLE SET OPERATIONS ##
@@ -276,7 +275,7 @@ cdef class Graph:
             the first found result is returned.
         :rtype: rdflib.term.Node
         """
-        if not self.id:
+        if not self.uri:
             raise ValueError('Cannot use `value` on a non-named graph.')
 
         # TODO use slice.
@@ -284,7 +283,7 @@ cdef class Graph:
 
         if strict and len(values) > 1:
             raise RuntimeError('More than one value found for {}, {}.'.format(
-                    self.id, p))
+                    self.uri, p))
 
         for ret in values:
             return ret
@@ -310,14 +309,20 @@ cdef class Graph:
 
         :param iterable triples: iterable of 3-tuple triples.
         """
-        cdef TripleKey spok
+        cdef:
+            Key sk, pk, ok
+            TripleKey spok
 
         for s, p, o in triples:
-            spok = [
-                self.store.to_key(s),
-                self.store.to_key(p),
-                self.store.to_key(o),
-            ]
+            logger.info(f'Adding {s} {p} {o} to store: {self.store}')
+            sk = self.store.to_key(s)
+            logger.info(f'sk: {sk}')
+            pk = self.store.to_key(p)
+            logger.info(f'pk: {pk}')
+            ok = self.store.to_key(o)
+            logger.info(f'ok: {ok}')
+            spok = [sk, pk, ok]
+            logger.info(f'spok: {sk} {pk} {ok}')
             self.keys.add(&spok, True)
 
 
@@ -341,7 +346,7 @@ cdef class Graph:
         :param str uri: URI of the new graph. This should be different from
             the original.
         """
-        cdef Graph new_gr = Graph(self.store, self.ct, uri=uri)
+        cdef Graph new_gr = Graph(self.store, self.capacity, uri=uri)
 
         new_gr.keys = self.keys.copy()
 
@@ -353,7 +358,7 @@ cdef class Graph:
         :param str uri: URI of the new graph. This should be different from
             the original.
         """
-        return Graph(self.store, self.ct, uri=uri)
+        return Graph(self.store, self.capacity, uri=uri)
 
 
     cpdef void set(self, tuple trp) except *:
@@ -374,7 +379,7 @@ cdef class Graph:
 
         :rtype: rdflib.Graph
         """
-        gr = Graph(identifier=self.id)
+        gr = Graph(identifier=self.uri)
         for trp in self.data:
             gr.add(trp)
 
@@ -387,12 +392,14 @@ cdef class Graph:
 
         This behaves like the rdflib.Graph slicing policy.
         """
+        logger.info(f'Slicing: {s} {p} {o}')
         # If no terms are unbound, check for containment.
         if s is not None and p is not None and o is not None: # s p o
             return (s, p, o) in self
 
         # If some terms are unbound, do a lookup.
         res = self.lookup((s, p, o))
+        logger.info(f'Slicing results: {res}')
         if s is not None:
             if p is not None: # s p ?
                 return {r[2] for r in res}
@@ -433,7 +440,7 @@ cdef class Graph:
             Graph res_gr = self.empty_copy()
 
         self._match_ptn_callback(pattern, res_gr, add_trp_callback, NULL)
-        res_gr.data.resize()
+        res_gr.keys.resize()
 
         return res_gr
 
@@ -456,6 +463,8 @@ cdef class Graph:
 
         s, p, o = pattern
 
+        logger.info(f'Match Callback pattern: {pattern}')
+
         # Decide comparison logic outside the loop.
         if s is not None and p is not None and o is not None:
             # Shortcut for 3-term match.
@@ -472,28 +481,33 @@ cdef class Graph:
         if s is not None:
             k1 = self.store.to_key(s)
             if p is not None:
-                cmp_fn = cb.lookup_skpk_cmp_fn
                 k2 = self.store.to_key(p)
+                cmp_fn = cb.lookup_skpk_cmp_fn
+                logger.info('SKPK')
             elif o is not None:
-                cmp_fn = cb.lookup_skok_cmp_fn
                 k2 = self.store.to_key(o)
+                cmp_fn = cb.lookup_skok_cmp_fn
+                logger.info('SKOK')
             else:
                 cmp_fn = cb.lookup_sk_cmp_fn
         elif p is not None:
             k1 = self.store.to_key(p)
             if o is not None:
-                cmp_fn = cb.lookup_pkok_cmp_fn
                 k2 = self.store.to_key(o)
+                cmp_fn = cb.lookup_pkok_cmp_fn
             else:
                 cmp_fn = cb.lookup_pk_cmp_fn
         elif o is not None:
-            cmp_fn = cb.lookup_ok_cmp_fn
             k1 = self.store.to_key(o)
+            cmp_fn = cb.lookup_ok_cmp_fn
         else:
             cmp_fn = cb.lookup_none_cmp_fn
 
+        logger.info(f'k1: {k1} k2: {k2}')
         # Iterate over serialized triples.
+        self.keys.seek()
         while self.keys.get_next(&spok):
+            logger.info(f'Verifying spok: {spok}')
             if cmp_fn(&spok, k1, k2):
                 callback_fn(gr, &spok, ctx)
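
As a usage illustration of the reworked constructor and the new data and capacity properties, here is a minimal sketch; store and triples are placeholders (an LmdbTriplestore-backed store and an iterable of 3-tuples of rdflib terms), and a read-write transaction is assumed to be required, as in the updated test at the bottom of this commit.

    from rdflib import URIRef
    from lakesuperior.model.graph.graph import Graph

    with store.txn_ctx(True):
        # `capacity` pre-allocates the underlying Keyset; `uri` makes this a
        # named graph, enabling value() and the slicing notation.
        gr = Graph(store, capacity=16, uri='urn:example:g1')
        gr.add_triples(triples)

        assert gr.uri == URIRef('urn:example:g1')  # set eagerly in __cinit__
        print(gr.capacity)    # total allocated slots in the underlying Keyset
        print(len(gr.data))   # triples materialized as a Python/RDFlib set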
 

+ 8 - 6
lakesuperior/model/graph/term.pyx

@@ -139,7 +139,9 @@ cdef int serialize_from_rdflib(
         elif isinstance(term_obj, BNode):
             _term.type = LSUP_TERM_TYPE_BNODE
         else:
-            raise ValueError(f'Unsupported term type: {type(term_obj)}')
+            raise ValueError(
+                f'Unsupported term type: {term_obj} {type(term_obj)}'
+            )
 
     serialize(&_term, data, pool)
 
@@ -148,17 +150,17 @@ cdef object to_rdflib(const Term *term):
     """
     Return an RDFLib term.
     """
-    cdef str data = (<bytes>term[0].data).decode()
+    cdef str data = (<bytes>term.data).decode()
     if term[0].type == LSUP_TERM_TYPE_LITERAL:
         return Literal(
             data,
-            datatype=term[0].datatype if not term[0].lang else None,
-            lang=term[0].lang or None
+            datatype=term.datatype if not term.lang else None,
+            lang=term.lang or None
         )
     else:
-        if term[0].type == LSUP_TERM_TYPE_URIREF:
+        if term.type == LSUP_TERM_TYPE_URIREF:
             return URIRef(data)
-        elif term[0].type == LSUP_TERM_TYPE_BNODE:
+        elif term.type == LSUP_TERM_TYPE_BNODE:
             return BNode(data)
         else:
             raise IOError(f'Unknown term type code: {term[0].type}')
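
The term.pyx edits mostly replace explicit pointer dereferences (term[0].type) with plain attribute access (term.type); in Cython both forms compile to the same C arrow operator on a struct pointer. A tiny standalone sketch of that equivalence (hypothetical struct, not the module's actual Term definition):

    cdef struct Pair:
        int type
        char* data

    cdef int get_type(Pair* p):
        # `p.type` and `p[0].type` generate the same C code (p->type);
        # the diff simply drops the explicit dereference for readability.
        return p.type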

+ 1 - 1
lakesuperior/model/structures/keyset.pxd

@@ -9,7 +9,7 @@ ctypedef bint (*key_cmp_fn_t)(
 cdef class Keyset:
     cdef:
         TripleKey* data
-        size_t ct
+        size_t capacity
         size_t _cur # Index cursor used to look up values.
         size_t _free_i # Index of next free slot.
         float expand_ratio # By how much storage is automatically expanded when

+ 16 - 16
lakesuperior/model/structures/keyset.pyx

@@ -15,16 +15,16 @@ cdef class Keyset:
     """
     Pre-allocated array (not set, as the name may suggest) of ``TripleKey``s.
     """
-    def __cinit__(self, size_t ct=0, expand_ratio=.5):
+    def __cinit__(self, size_t capacity=0, expand_ratio=.5):
         """
         Initialize and allocate memory for the data set.
 
-        :param size_t ct: Number of elements to be accounted for.
+        :param size_t capacity: Number of elements to be accounted for.
         """
-        self.ct = ct
+        self.capacity = capacity
         self.expand_ratio = expand_ratio
-        self.data = <TripleKey*>PyMem_Malloc(self.ct * TRP_KLEN)
-        if ct and not self.data:
+        self.data = <TripleKey*>PyMem_Malloc(self.capacity * TRP_KLEN)
+        if capacity and not self.data:
             raise MemoryError('Error allocating Keyset data.')
 
         self._cur = 0
@@ -54,7 +54,7 @@ cdef class Keyset:
         """
         Size of the object as the number of occupied data slots.
 
-        Note that this is different from :py:data:`ct`_, which indicates
+        Note that this is different from :py:data:`capacity`_, which indicates
         the number of allocated items in memory.
         """
         return self._free_i
@@ -95,11 +95,11 @@ cdef class Keyset:
         if val[0] == NULL_TRP or (check_dup and self.contains(val)):
             return
 
-        if self._free_i >= self.threshod:
+        if self._free_i >= self.capacity:
             if self.expand_ratio > 0:
                 # In some edge cases, a very small ratio may round down to a
                 # zero increase, so the baseline increase is 1 element.
-                self.resize(1 + <size_t>(self.ct * (1 + self.expand_ratio)))
+                self.resize(1 + <size_t>(self.capacity * (1 + self.expand_ratio)))
             else:
                 raise MemoryError('No space left in key set.')
 
@@ -143,8 +143,8 @@ cdef class Keyset:
         """
         Copy a Keyset.
         """
-        cdef Keyset new_ks = Keyset(self.ct, expand_ratio=self.expand_ratio)
-        memcpy(new_ks.data, self.data, self.ct * TRP_KLEN)
+        cdef Keyset new_ks = Keyset(self.capacity, expand_ratio=self.expand_ratio)
+        memcpy(new_ks.data, self.data, self.capacity * TRP_KLEN)
         new_ks.seek()
 
         return new_ks
@@ -159,7 +159,7 @@ cdef class Keyset:
         """
         cdef:
             TripleKey val
-            Keyset new_ks = Keyset(self.ct, self.expand_ratio)
+            Keyset new_ks = Keyset(self.capacity, self.expand_ratio)
 
         self.seek()
         while self.get_next(&val):
@@ -191,7 +191,7 @@ cdef class Keyset:
             raise MemoryError('Could not reallocate Keyset data.')
 
         self.data = tmp
-        self.ct = size
+        self.capacity = size
         self.seek()
 
 
@@ -210,7 +210,7 @@ cdef class Keyset:
         """
         cdef:
             TripleKey spok
-            Keyset ret = Keyset(self.ct)
+            Keyset ret = Keyset(self.capacity)
             Key k1, k2
             key_cmp_fn_t cmp_fn
 
@@ -286,7 +286,7 @@ cdef Keyset subtract(Keyset ks1, Keyset ks2):
     """
     cdef:
         TripleKey val
-        Keyset ks3 = Keyset(ks1.ct)
+        Keyset ks3 = Keyset(ks1.capacity)
 
     ks1.seek()
     while ks1.get_next(&val):
@@ -306,7 +306,7 @@ cdef Keyset intersect(Keyset ks1, Keyset ks2):
     """
     cdef:
         TripleKey val
-        Keyset ks3 = Keyset(ks1.ct)
+        Keyset ks3 = Keyset(ks1.capacity)
 
     ks1.seek()
     while ks1.get_next(&val):
@@ -326,7 +326,7 @@ cdef Keyset xor(Keyset ks1, Keyset ks2):
     """
     cdef:
         TripleKey val
-        Keyset ks3 = Keyset(ks1.ct + ks2.ct)
+        Keyset ks3 = Keyset(ks1.capacity + ks2.capacity)
 
     ks1.seek()
     while ks1.get_next(&val):
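
In Keyset.add(), exhausting the allocation now grows it by expand_ratio with a baseline of one element, so a very small ratio can never round down to a zero increase (a non-positive ratio raises MemoryError instead). A quick sanity check of that arithmetic, in plain Python mirroring the Cython expression above:

    def next_capacity(capacity, expand_ratio=.5):
        # Mirrors: self.resize(1 + <size_t>(self.capacity * (1 + self.expand_ratio)))
        return 1 + int(capacity * (1 + expand_ratio))

    assert next_capacity(0) == 1           # an empty Keyset still gains a slot
    assert next_capacity(4) == 7           # 4 * 1.5 = 6, plus the baseline of 1
    assert next_capacity(100, .01) == 102  # 1% growth plus the baseline of 1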

+ 2 - 38
lakesuperior/store/ldp_rs/lmdb_store.py

@@ -212,43 +212,7 @@ class LmdbStore(LmdbTriplestore, Store):
                 context = None
             else:
                 context = context.identifier
-                #logger.debug('Converted graph into URI: {}'.format(context))
+        elif isinstance(context, str):
+            context = URIRef(context)
 
         return context
-
-
-    ## Convenience methods—not necessary for functioning but useful for
-    ## debugging.
-
-    #def _keys_in_ctx(self, pk_ctx):
-    #    """
-    #    Convenience method to list all keys in a context.
-
-    #    :param bytes pk_ctx: Pickled context URI.
-
-    #    :rtype: Iterator(tuple)
-    #    :return: Generator of triples.
-    #    """
-    #    with self.cur('c:spo') as cur:
-    #        if cur.set_key(pk_ctx):
-    #            tkeys = cur.iternext_dup()
-    #            return {self._key_to_triple(tk) for tk in tkeys}
-    #        else:
-    #            return set()
-
-
-    #def _ctx_for_key(self, tkey):
-    #    """
-    #    Convenience method to list all contexts that a key is in.
-
-    #    :param bytes tkey: Triple key.
-
-    #    :rtype: Iterator(rdflib.URIRef)
-    #    :return: Generator of context URIs.
-    #    """
-    #    with self.cur('spo:c') as cur:
-    #        if cur.set_key(tkey):
-    #            ctx = cur.iternext_dup()
-    #            return {self._unpickle(c) for c in ctx}
-    #        else:
-    #            return set()
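
The lmdb_store.py change broadens context normalization: an rdflib Graph is reduced to its identifier (with the default graph mapped to None in the surrounding code), and a plain string is now coerced to a URIRef as well. A standalone sketch of those branches (hypothetical helper name; the real logic sits inside an LmdbStore method whose signature is not shown in the hunk):

    from rdflib import Graph as RDFGraph, URIRef

    def normalize_context(context):
        if isinstance(context, RDFGraph):
            # The store keeps only the graph's identifier (a URIRef).
            context = context.identifier
        elif isinstance(context, str):
            # New branch: accept a bare string and coerce it to a URIRef.
            context = URIRef(context)
        return context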

+ 6 - 6
lakesuperior/store/ldp_rs/lmdb_triplestore.pxd

@@ -23,26 +23,26 @@ cdef:
 cdef class LmdbTriplestore(BaseLmdbStore):
     cpdef dict stats(self)
     cpdef size_t _len(self, context=*) except -1
-    cpdef add(self, triple, context=*, quoted=*)
-    cpdef add_graph(self, graph)
+    cpdef void add(self, triple, context=*, quoted=*) except *
+    cpdef void add_graph(self, graph) except *
     cpdef void _remove(self, tuple triple_pattern, context=*) except *
     cpdef void _remove_graph(self, object gr_uri) except *
     cpdef tuple all_namespaces(self)
+    cpdef Graph triple_keys(self, tuple triple_pattern, context=*, uri=*)
 
     cdef:
         void _index_triple(self, int op, TripleKey spok) except *
-        Graph triple_keys(self, tuple triple_pattern, str context=*, str uri=*)
         void _all_term_keys(self, term_type, cc.HashSet** tkeys) except *
-        void lookup_term(self, const Key* tk, Buffer* data) except *
+        void lookup_term(self, const Key tk, Buffer* data) except *
         Graph _lookup(self, tuple triple_pattern)
         Graph _lookup_1bound(self, unsigned char idx, Key luk)
         Graph _lookup_2bound(
             self, unsigned char idx1, unsigned char idx2, DoubleKey tks
         )
         object from_key(self, const Key tk)
-        Key to_key(self, term) except -1
+        Key to_key(self, term) except? 0
         void all_contexts(self, Key** ctx, size_t* sz, triple=*) except *
         Key _append(
                 self, Buffer *value,
                 unsigned char *dblabel=*, lmdb.MDB_txn *txn=*,
-                unsigned int flags=*)
+                unsigned int flags=*) except? 0
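
The declarations of to_key() and _append() switch from `except -1` to `except? 0`. A zero return can double as an error sentinel because NULL_KEY = 0 is never issued for a stored term (FIRST_KEY = 1), and the question mark makes Cython confirm that an exception is actually set before treating 0 as an error; -1 is presumably unusable because Key is unsigned. A minimal standalone sketch of that contract (hypothetical names; the Key typedef here is an assumption):

    ctypedef size_t Key               # assumption for this sketch only

    cdef Key find_key(bint found) except? 0:
        if not found:
            # The exception propagates to the caller; Cython only inserts an
            # error check when the function returns the sentinel value 0.
            raise KeyError('term not indexed')
        return 1                      # FIRST_KEY and up are the valid returns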

+ 26 - 23
lakesuperior/store/ldp_rs/lmdb_triplestore.pyx

@@ -195,7 +195,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
 
     # Triple and graph methods.
 
-    cpdef add(self, triple, context=None, quoted=False):
+    cpdef void add(self, triple, context=None, quoted=False) except *:
         """
         Add a triple and start indexing.
 
@@ -277,7 +277,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
         self._index_triple(IDX_OP_ADD, [spock[0], spock[1], spock[2]])
 
 
-    cpdef add_graph(self, c):
+    cpdef void add_graph(self, c) except *:
         """
         Add a graph (context) to the database.
 
@@ -287,6 +287,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
 
         :param rdflib.URIRef graph: URI of the named graph to add.
         """
+        logger.exception('Called add_graph.')
         cdef:
             lmdb.MDB_txn *_txn
             Buffer _sc
@@ -644,8 +645,8 @@ cdef class LmdbTriplestore(BaseLmdbStore):
             self._cur_close(cur)
 
 
-    cdef Graph triple_keys(
-        self, tuple triple_pattern, str context=None, str uri=None
+    cpdef Graph triple_keys(
+        self, tuple triple_pattern, context=None, uri=None
     ):
         """
         Top-level lookup method.
@@ -671,7 +672,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
                 ck = self.to_key(context)
             except KeyNotFoundError:
                 # Context not found.
-                return Graph(self)
+                return Graph(self, uri=uri)
 
             icur = self._cur_open('c:spo')
 
@@ -686,7 +687,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
                             tk = self.to_key(term)
                         except KeyNotFoundError:
                             # A term key was not found.
-                            return Graph(self)
+                            return Graph(self, uri=uri)
                         spok[i] = tk
                     data_v.mv_data = spok
                     data_v.mv_size = TRP_KLEN
@@ -696,8 +697,8 @@ cdef class LmdbTriplestore(BaseLmdbStore):
                     except KeyNotFoundError:
                         # Triple not found.
                         #logger.debug('spok / ck pair not found.')
-                        return Graph(self)
-                    ret = Graph(self, 1)
+                        return Graph(self, uri=uri)
+                    ret = Graph(self, 1, uri=uri)
                     ret.keys.add(&spok)
 
                     return ret
@@ -710,10 +711,10 @@ cdef class LmdbTriplestore(BaseLmdbStore):
                             icur, &key_v, &data_v, lmdb.MDB_SET))
                     except KeyNotFoundError:
                         # Triple not found.
-                        return Graph(self)
+                        return Graph(self, uri=uri)
 
                     _check(lmdb.mdb_cursor_count(icur, &ct))
-                    ret = Graph(self, ct)
+                    ret = Graph(self, ct, uri=uri)
 
                     _check(lmdb.mdb_cursor_get(
                         icur, &key_v, &data_v, lmdb.MDB_GET_MULTIPLE))
@@ -735,13 +736,13 @@ cdef class LmdbTriplestore(BaseLmdbStore):
                     try:
                         res = self._lookup(triple_pattern)
                     except KeyNotFoundError:
-                        return Graph(self)
+                        return Graph(self, uri=uri)
 
                     key_v.mv_data = &ck
                     key_v.mv_size = KLEN
                     data_v.mv_size = TRP_KLEN
 
-                    flt_res = Graph(self, res.ct)
+                    flt_res = Graph(self, res.capacity, uri=uri)
                     res.seek()
                     while res.keys.get_next(&spok):
                         data_v.mv_data = spok
@@ -764,7 +765,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
             try:
                 res = self._lookup(triple_pattern)
             except KeyNotFoundError:
-                return Graph(self)
+                return Graph(self, uri=uri)
             return res
 
 
@@ -1181,7 +1182,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
 
     # Key conversion methods.
 
-    cdef inline void lookup_term(self, const Key* tk, Buffer* data) except *:
+    cdef inline void lookup_term(self, const Key tk, Buffer* data) except *:
         """
         look up a term by key.
 
@@ -1191,14 +1192,14 @@ cdef class LmdbTriplestore(BaseLmdbStore):
         cdef:
             lmdb.MDB_val key_v, data_v
 
-        key_v.mv_data = tk
+        key_v.mv_data = &tk
         key_v.mv_size = KLEN
 
         _check(
             lmdb.mdb_get(
                 self.txn, self.get_dbi('t:st'), &key_v, &data_v
             ),
-            f'Error getting data for key \'{tk[0]}\'.'
+            f'Error getting data for key \'{tk}\'.'
         )
         data.addr = data_v.mv_data
         data.sz = data_v.mv_size
@@ -1211,14 +1212,16 @@ cdef class LmdbTriplestore(BaseLmdbStore):
         :param Key key: The key to be converted.
         """
         cdef Buffer pk_t
+        logger.info(f'From key:{tk}')
 
-        self.lookup_term(&tk, &pk_t)
+        self.lookup_term(tk, &pk_t)
+        logger.info(f'from_key buffer: {buffer_dump(&pk_t)}')
 
         # TODO Make Term a class and return that.
         return deserialize_to_rdflib(&pk_t)
 
 
-    cdef inline Key to_key(self, term) except -1:
+    cdef Key to_key(self, term) except? 0:
         """
         Convert a term into a key and insert it in the term key store.
 
@@ -1233,7 +1236,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
             Buffer pk_t
             Key tk
 
-        #logger.debug(f'Serializing term: {term}')
+        logger.info(f'Serializing term: {term}')
         serialize_from_rdflib(term, &pk_t)
         hash128(&pk_t, &thash)
         key_v.mv_data = thash
@@ -1247,14 +1250,15 @@ cdef class LmdbTriplestore(BaseLmdbStore):
             return (<Key*>data_v.mv_data)[0]
 
         except KeyNotFoundError:
+            logger.info(f'Adding term {term} to store.')
             # If key is not in the store, add it.
             if self.is_txn_rw:
                 # Use existing R/W transaction.
-                #logger.debug('Working in existing RW transaction.')
+                logger.info('Working in existing RW transaction.')
                 _txn = self.txn
             else:
                 # Open new R/W transaction.
-                #logger.debug('Opening a temporary RW transaction.')
+                logger.info('Opening a temporary RW transaction.')
                 _check(lmdb.mdb_txn_begin(self.dbenv, NULL, 0, &_txn))
 
             try:
@@ -1281,7 +1285,7 @@ cdef class LmdbTriplestore(BaseLmdbStore):
         self, Buffer *value,
         unsigned char *dblabel=b'', lmdb.MDB_txn *txn=NULL,
         unsigned int flags=0
-    ):
+        ) except? 0:
         """
         Append one or more keys and values to the end of a database.
 
@@ -1307,7 +1311,6 @@ cdef class LmdbTriplestore(BaseLmdbStore):
             new_idx = FIRST_KEY
         else:
             new_idx = (<Key*>key_v.mv_data)[0] + 1
-            logger.debug(f'New index value: {new_idx}')
         finally:
             self._cur_close(cur)
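
Since triple_keys() is now cpdef (and therefore callable from Python) and every early-return branch forwards the uri argument, even an empty lookup comes back as a named Graph. A hypothetical call sketch with placeholder store and terms, using the read-only transaction context seen elsewhere in this commit:

    from rdflib import URIRef

    with store.txn_ctx():
        gr = store.triple_keys(
            (subj, None, None),                   # pattern with unbound terms
            context=URIRef('urn:example:ctx'),    # resolved via to_key()
            uri=URIRef('urn:example:results'),    # becomes gr.uri on the result
        )
        assert gr.uri == URIRef('urn:example:results')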
 

+ 2 - 1
lakesuperior/store/ldp_rs/rsrc_centric_layout.py

@@ -217,10 +217,11 @@ class RsrcCentricLayout:
         fname = path.join(
                 basedir, 'data', 'bootstrap', 'rsrc_centric_layout.sparql')
         with store.txn_ctx(True):
+            #import pdb; pdb.set_trace()
             with open(fname, 'r') as f:
                 data = Template(f.read())
                 self.ds.update(data.substitute(timestamp=arrow.utcnow()))
-            #import pdb; pdb.set_trace()
+        with store.txn_ctx():
             imr = self.get_imr('/', incl_inbound=False, incl_children=True)
 
         #gr = Graph(identifier=imr.uri)

+ 9 - 2
setup.py

@@ -82,6 +82,8 @@ extensions = [
             path.join('lakesuperior', 'model', f'base.{ext}'),
         ],
         include_dirs=include_dirs,
+        extra_compile_args=['-g'],
+        extra_link_args=['-g'],
         #extra_compile_args=['-fopenmp'],
         #extra_link_args=['-fopenmp']
     ),
@@ -96,6 +98,8 @@ extensions = [
             path.join('lakesuperior', 'model', 'structures', f'*.{ext}'),
         ],
         include_dirs=include_dirs,
+        extra_compile_args=['-g'],
+        extra_link_args=['-g'],
         #extra_compile_args=['-fopenmp'],
         #extra_link_args=['-fopenmp']
     ),
@@ -112,6 +116,8 @@ extensions = [
             path.join('lakesuperior', 'store', f'base_lmdb_store.{ext}'),
         ],
         include_dirs=include_dirs,
+        extra_compile_args=['-g'],
+        extra_link_args=['-g'],
     ),
     Extension(
         'lakesuperior.model.graph.*',
@@ -143,8 +149,8 @@ extensions = [
                 'lakesuperior', 'store', 'ldp_rs', f'lmdb_triplestore.{ext}'),
         ],
         include_dirs=include_dirs,
-        extra_compile_args=['-fopenmp'],
-        extra_link_args=['-fopenmp']
+        extra_compile_args=['-g', '-fopenmp'],
+        extra_link_args=['-g', '-fopenmp']
     ),
 ]
 
@@ -179,6 +185,7 @@ if USE_CYTHON:
             'boundscheck': False,
             'wraparound': False,
             'profile': True,
+            'embedsignature': True
         }
     )
 

+ 8 - 5
tests/0_data_structures/test_graph.py

@@ -1,3 +1,4 @@
+import pdb
 import pytest
 
 from shutil import rmtree
@@ -54,16 +55,18 @@ class TestGraphInit:
         assert len(gr) == 0
 
 
-    def test_init_triples(self, trp):
+    def test_init_triples(self, trp, store):
         """
         Test creation using a Python set.
         """
-        gr = Graph(data=set(trp))
+        with store.txn_ctx(True):
+            pdb.set_trace()
+            gr = Graph(store, data=set(trp))
 
-        assert len(gr) == 6
+            assert len(gr) == 6
 
-        for t in trp:
-            assert t in gr
+            for t in trp:
+                assert t in gr
 
 
 @pytest.mark.usefixtures('trp')