
Rearrange modules.

scossu 2 weeks ago
parent
commit
e6cf4ddea9
6 changed files with 93 additions and 67 deletions
  1. README.md (+4 -3)
  2. pkar.lua (+6 -6)
  3. pocket_archive-scm-1.rockspec (+2 -1)
  4. src/generator.lua (+16 -34)
  5. src/repo.lua (+63 -0)
  6. src/submission.lua (+2 -23)

+ 4 - 3
README.md

@@ -225,15 +225,15 @@ functional and available for use by the intended audience.
   - ✓ Index
   - ✓ Resource
   - ✓ Static assets
--  Non-HTML generators
+-  Non-HTML generators
   - ✓ RDF (turtle)
   - ✓ Transformers
   - ✓ JS search engine index
--  CLI
+-  CLI
   - ✓ Init archive
   - ✓ Deposit
   - ✓ Generate site
-  -  Generate LL (single resource)
+  -  Generate LL (single resource)
   - ✓ Generate RDF (single resource)
 - ⚒ Front end
   - ⚒ JS search engine
@@ -254,6 +254,7 @@ functional and available for use by the intended audience.
   - Local overrides
  - Relationships inference
 - htmlgen option for local file or webserver URL generation
+- Improve search indexing
 - CLI
   - Generate LL (multi)
   - Generate RDF (multi)

+ 6 - 6
pkar.lua

@@ -10,7 +10,8 @@ local graph = require "volksdata.graph"
 
 local pkar = require "pocket_archive"
 local sub = require "pocket_archive.submission"
-local hgen = require "pocket_archive.html_generator"
+local repo = require "pocket_archive.repo"
+local gen = require "pocket_archive.generator"
 
 
 cli.locale "en_US"  -- TODO set with multilingual support.
@@ -33,8 +34,7 @@ list = cli.command {
     "List all resource IDs.",
 
     function()
-        local gr = graph.new(pkar.store, term.DEFAULT_CTX)
-        for _, s in pairs(gr:unique_terms(triple.POS_S)) do
+        for _, s in pairs(repo.gr:unique_terms(triple.POS_S)) do
             print(nsm.denormalize_uri(s.data))
         end
     end,
@@ -58,7 +58,7 @@ deposit = cli.command {
 gen_site = cli.command {
     "Generate a static site from the archive.",
 
-    function(args) hgen.generate_site() end
+    function(args) gen.generate_site() end
 }
 
 gen_rdf = cli.command {
@@ -76,7 +76,7 @@ gen_rdf = cli.command {
 
     function(args)
         local s = term.new_iriref_ns(args.id)
-        print(hgen.generate_rdf(s, args.format))
+        print(repo.serialize_rsrc(s, args.format))
     end,
 }
 
@@ -90,7 +90,7 @@ gen_ll = cli.command {
 
     function(args)
         local s = term.new_iriref_ns(args.id)
-        print(hgen.generate_ll(s))
+        print(gen.generate_ll(s))
     end
 }
 
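Editor's note: with this commit the CLI commands become thin wrappers around the two new modules, `repo` for graph access and serialization and `gen` for rendering. A minimal sketch of what the `gen_rdf` and `gen_ll` commands now do for a single resource; the ID `pas:12345` is a placeholder invented for illustration (in the CLI it comes from `args.id`):

```lua
local term = require "volksdata.term"
local repo = require "pocket_archive.repo"
local gen = require "pocket_archive.generator"

-- Placeholder resource ID; the CLI passes args.id here.
local s = term.new_iriref_ns("pas:12345")

-- Equivalent of the `gen_rdf` command: serialize one resource as Turtle.
print(repo.serialize_rsrc(s, "ttl"))

-- Equivalent of the `gen_ll` command for the same resource.
print(gen.generate_ll(s))
```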

+ 2 - 1
pocket_archive-scm-1.rockspec

@@ -37,7 +37,8 @@ build = {
         ["pocket_archive.model"] = "src/model.lua",
         ["pocket_archive.validator"] = "src/validator.lua",
         ["pocket_archive.submission"] = "src/submission.lua",
-        ["pocket_archive.html_generator"] = "src/html_generator.lua",
+        ["pocket_archive.repo"] = "src/repo.lua",
+        ["pocket_archive.generator"] = "src/generator.lua",
         ["pocket_archive.transformers"] = "src/transformers.lua",
         ["pocket_archive.monocypher"] = {
             "ext/monocypher/monocypher.c",

+ 16 - 34
src/html_generator.lua → src/generator.lua

@@ -14,6 +14,7 @@ local graph = require "volksdata.graph"
 local pkar = require "pocket_archive"
 local logger = pkar.logger
 local model = require "pocket_archive.model"
+local repo = require "pocket_archive.repo"
 local transformers = require "pocket_archive.transformers"
 
 local dbg = require "debugger"
@@ -22,9 +23,6 @@ local dbg = require "debugger"
 -- "nil" table - for missing key fallback in chaining.
 local NT = {}
 
--- Default store graph to search all triples.
-local gr = graph.new(pkar.store, term.DEFAULT_CTX)
-
 -- All resource subjects.
 local subjects
 
@@ -75,7 +73,7 @@ end
 
 
 local function get_tn_url(s)
-    if gr:attr(s, pkar.RDF_TYPE)[pkar.FILE_T.hash] then
+    if repo.gr:attr(s, pkar.RDF_TYPE)[pkar.FILE_T.hash] then
         -- The subject is a file.
         tn_fname = (s.data:gsub(pkar.PAR_NS, "") .. ".jpg")  -- FIXME do not hardcode.
         return plpath.join(
@@ -85,7 +83,7 @@ local function get_tn_url(s)
     -- Recurse through all first children until one with a thumbnail, or a
     -- leaf without children, is found.
     local first_child
-    _, first_child = next(gr:attr(s, pkar.FIRST_P))
+    _, first_child = next(repo.gr:attr(s, pkar.FIRST_P))
     if first_child then return get_tn_url(first_child) end
 end
 
@@ -96,7 +94,7 @@ local function generate_dres(s, mconf)
     local children = {}
     local title
     -- Metadata
-    local attrs = gr:connections(s, term.LINK_OUTBOUND)
+    local attrs = repo.gr:connections(s, term.LINK_OUTBOUND)
     for p, ots in pairs(attrs) do
         local pname = nsm.denormalize_uri(p.data)
         p_label = ((mconf.properties or NT)[pname] or NT).label
@@ -116,10 +114,10 @@ local function generate_dres(s, mconf)
 
                 -- Fallback labels.
                 local label
-                _, label = next(gr:attr(child_s, pkar.DC_TITLE_P))
+                _, label = next(repo.gr:attr(child_s, pkar.DC_TITLE_P))
                 if label then label = label.data
                 else
-                    _, label = next(gr:attr(child_s, pkar.PATH_P))
+                    _, label = next(repo.gr:attr(child_s, pkar.PATH_P))
                     if label then label = plpath.basename(label.data)
                         else label = child_s.data end
                 end
@@ -135,7 +133,7 @@ local function generate_dres(s, mconf)
                     })
                     logger:debug("Child label for ", child_s.data, ": ", ll[#ll].label or "nil")
                     -- There can only be one "next"
-                    _, child_s = next(gr:attr(child_s, pkar.NEXT_P))
+                    _, child_s = next(repo.gr:attr(child_s, pkar.NEXT_P))
                 end
                 table.insert(children, ll)
             end
@@ -195,7 +193,7 @@ local function generate_ores(s, mconf)
     local techmd = {}
     local rel = {}
     -- Metadata
-    local attrs = gr:connections(s, term.LINK_OUTBOUND)
+    local attrs = repo.gr:connections(s, term.LINK_OUTBOUND)
     for p, ots in pairs(attrs) do
         local pname = nsm.denormalize_uri(p.data)
         p_label = ((mconf.properties or NT)[pname] or NT).label
@@ -286,14 +284,6 @@ local function generate_ores(s, mconf)
 end
 
 
-M.get_graph = function(s)
-    out_gr = graph.new(nil, s.data)
-    gr:copy(out_gr, s)
-
-    return out_gr
-end
-
-
 M.generate_res_idx = function(s, mconf)
     local rrep = {
         id = nsm.denormalize_uri(s.data),
@@ -301,7 +291,7 @@ M.generate_res_idx = function(s, mconf)
         href = pkar.gen_pairtree("/res", s.data, ".html", true),
     }
 
-    local attrs = gr:connections(s, term.LINK_OUTBOUND)
+    local attrs = repo.gr:connections(s, term.LINK_OUTBOUND)
 
     local function format_value(pname, o)
         logger:debug("Adding value to " .. pname .. ": " .. ((o or NT).data or "nil"))
@@ -339,16 +329,8 @@ M.generate_res_idx = function(s, mconf)
 end
 
 
-M.generate_rdf = function(s, format)
-    local res_gr = M.get_graph(s)
-    logger:debug("Serializing graph: ", s.data)
-
-    return res_gr:encode(format or "ttl")
-end
-
-
 M.generate_ll = function(s)
-    local res_gr = M.get_graph(s)
+    local res_gr = repo.get_rsrc(s)
     tdata = {}
     for p, ots in pairs(res_gr:connections(s, term.LINK_OUTBOUND)) do
         pname = nsm.denormalize_uri(p.data)
@@ -374,13 +356,13 @@ end
 
 M.generate_resource = function(s)
     local res_type
-    _, res_type = next(gr:attr(s, pkar.CONTENT_TYPE_P))
+    _, res_type = next(repo.gr:attr(s, pkar.CONTENT_TYPE_P))
     local mconf = model.models[res_type.data]
 
     -- Generate RDF/Turtle doc.
     local res_path = pkar.gen_pairtree(M.res_dir, s.data, ".ttl")
     local ofh = assert(io.open(res_path, "w"))
-    ofh:write(M.generate_rdf(s, "ttl"))
+    ofh:write(repo.serialize_rsrc(s, "ttl"))
     ofh:close()
 
     -- Generate HTML doc.
@@ -401,7 +383,7 @@ end
 
 M.generate_resources = function()
     -- Look up if subjects are already populated.
-    subjects = subjects or gr:unique_terms(triple.POS_S)
+    subjects = subjects or repo.gr:unique_terms(triple.POS_S)
 
     -- Initialize the JSON template with an opening brace.
     local ofh = assert(io.open(index_path, "w"))
@@ -431,14 +413,14 @@ end
 M.generate_idx = function()
     local obj_idx = {}
     -- Get all subject of type: Artifact.
-    s_ts = gr:term_set(
+    s_ts = repo.gr:term_set(
         pkar.RDF_TYPE, triple.POS_P,
         term.new_iriref_ns("pas:Artifact"), triple.POS_O
     )
     for _, s in pairs(s_ts) do
         local title, created
-        _, title = next(gr:attr(s, pkar.DC_TITLE_P))
-        _, created = next(gr:attr(s, pkar.DC_CREATED_P))
+        _, title = next(repo.gr:attr(s, pkar.DC_TITLE_P))
+        _, created = next(repo.gr:attr(s, pkar.DC_CREATED_P))
 
         local obj = {
             href = pkar.gen_pairtree("/res", s.data, ".html", true),

+ 63 - 0
src/repo.lua

@@ -0,0 +1,63 @@
+--[[
+RDF repository services.
+--]]
+
+local nsm = require "volksdata.namespace"
+local term = require "volksdata.term"
+local triple = require "volksdata.triple"
+local graph = require "volksdata.graph"
+
+local pkar = require "pocket_archive"
+local logger = pkar.logger
+local validator = require "pocket_archive.validator"
+
+
+-- "nil" table - for missing key fallback in chaining.
+local NT = {}
+
+
+local M = {
+    -- Default store graph to search all triples.
+    gr = graph.new(pkar.store, term.DEFAULT_CTX),
+}
+
+
+M.get_rsrc = function(s)
+    out_gr = graph.new(nil, s.data)
+    M.gr:copy(out_gr, s)
+
+    return out_gr
+end
+
+
+M.serialize_rsrc = function(s, format)
+    local res_gr = M.get_rsrc(s)
+    logger:debug("Serializing graph: ", s.data)
+
+    return res_gr:encode(format or "ttl")
+end
+
+
+M.store_updates = function(tmp_gr, s)
+    -- TODO use a transaction when volksdata_lua supports it.
+    logger:debug("Graph: ", tmp_gr:encode("ttl"))
+
+    local val_report = validator.validate(tmp_gr, s)
+    if val_report.max_level == "ERROR" then error(
+        "Validation raised errors: " .. pp.write(val_report))
+    elseif val_report.max_level == "WARN" then logger:warn(
+        "Validation raised warnings: " .. pp.write(val_report))
+    elseif val_report.max_level == "NOTICE" then logger:warn(
+        "Validation raised notices: " .. pp.write(val_report)) end
+
+    local stored_gr = graph.new(pkar.store, term.DEFAULT_CTX)
+
+    logger:debug("Removing stored triples.")
+    stored_gr:remove(s)
+
+    logger:info("Storing triples.")
+    return tmp_gr:copy(stored_gr)
+end
+
+
+return M

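Editor's note: the new `pocket_archive.repo` module centralizes RDF store access. `repo.gr` is the shared store graph, `get_rsrc` copies one resource's triples into its own named graph, `serialize_rsrc` encodes that graph (Turtle by default), and `store_updates` validates a staging graph and swaps it in for the stored triples. A minimal usage sketch, with a placeholder subject IRI:

```lua
local term = require "volksdata.term"
local repo = require "pocket_archive.repo"

local s = term.new_iriref_ns("pas:12345")  -- placeholder subject

-- Copy the resource's triples out of the shared store graph (repo.gr).
local res_gr = repo.get_rsrc(s)

-- Serialize the copy; the format argument defaults to Turtle ("ttl").
print(repo.serialize_rsrc(s))

-- Validate and persist a staging graph, replacing the stored triples for s.
-- Here the staging graph is simply the copy made above.
repo.store_updates(res_gr, s)
```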
+ 2 - 23
src/submission.lua

@@ -13,6 +13,7 @@ local graph = require "volksdata.graph"
 local pkar = require "pocket_archive"
 local model = require "pocket_archive.model"
 local mc = require "pocket_archive.monocypher"
+local repo = require "pocket_archive.repo"
 local transformers = require "pocket_archive.transformers"
 local validator = require "pocket_archive.validator"
 
@@ -245,28 +246,6 @@ M.rsrc_to_graph = function(rsrc)
 end
 
 
-M.store_updates = function(tmp_gr, s)
-    -- TODO use a transaction when volksdata_lua supports it.
-    logger:debug("Graph: ", tmp_gr:encode("ttl"))
-
-    local val_report = validator.validate(tmp_gr, s)
-    if val_report.max_level == "ERROR" then error(
-        "Validation raised errors: " .. pp.write(val_report))
-    elseif val_report.max_level == "WARN" then logger:warn(
-        "Validation raised warnings: " .. pp.write(val_report))
-    elseif val_report.max_level == "NOTICE" then logger:warn(
-        "Validation raised notices: " .. pp.write(val_report)) end
-
-    local stored_gr = graph.new(pkar.store, term.DEFAULT_CTX)
-
-    logger:debug("Removing stored triples.")
-    stored_gr:remove(s)
-
-    logger:info("Storing triples.")
-    return tmp_gr:copy(stored_gr)
-end
-
-
 M.deposit = function(sip)
     for i, rsrc in ipairs(sip) do
         -- TODO Wrap this chunk into a txn. Each row is atomic.
@@ -335,7 +314,7 @@ M.deposit = function(sip)
         tstamp = os.date("!%Y-%m-%dT%TZ")
         rsrc["dc:created"] = tstamp
         rsrc["dc:modified"] = tstamp
-        M.store_updates(M.rsrc_to_graph(rsrc))
+        repo.store_updates(M.rsrc_to_graph(rsrc))
     end
 
     -- Remove processing directory.
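
Editor's note: after this change `submission` only builds the staging graph; validation and persistence live in `pocket_archive.repo`. A rough sketch of the hand-off, assuming `rsrc_to_graph` returns the temporary graph (and subject) that `store_updates` expects; the record below is a made-up placeholder, not the real SIP row format:

```lua
local sub = require "pocket_archive.submission"
local repo = require "pocket_archive.repo"

-- Placeholder record; real SIP rows carry more fields.
local rsrc = {
    ["dc:title"] = "Example artifact",
    ["dc:created"] = os.date("!%Y-%m-%dT%TZ"),
    ["dc:modified"] = os.date("!%Y-%m-%dT%TZ"),
}

-- Build the staging graph in submission, then validate and store it in repo.
repo.store_updates(sub.rsrc_to_graph(rsrc))
```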