Merge branch 'feat/datascript-storage' into refactor/worker-search

pull/10683/head
Tienson Qin 2023-12-12 19:27:39 +08:00
commit f550dc8325
36 changed files with 172 additions and 501 deletions

View File

@ -3,7 +3,7 @@
"version": "1.0.0",
"private": true,
"devDependencies": {
"@logseq/nbb-logseq": "^1.2.173"
"@logseq/nbb-logseq": "logseq/nbb-logseq#feat-db-v1"
},
"scripts": {
"test": "yarn nbb-logseq -cp test -m nextjournal.test-runner"

View File

@ -2,10 +2,9 @@
# yarn lockfile v1
"@logseq/nbb-logseq@^1.2.173":
version "1.2.173"
resolved "https://registry.yarnpkg.com/@logseq/nbb-logseq/-/nbb-logseq-1.2.173.tgz#27a52c350f06ac9c337d73687738f6ea8b2fc3f3"
integrity sha512-ABKPtVnSOiS4Zpk9+UTaGcs5H6EUmRADr9FJ0aEAVpa0WfAyvUbX/NgkQGMe1kKRv3EbIuLwaxfy+txr31OtAg==
"@logseq/nbb-logseq@logseq/nbb-logseq#feat-db-v1":
version "1.2.173-feat-db-v1"
resolved "https://codeload.github.com/logseq/nbb-logseq/tar.gz/e4910dfb12043404c97962d8faab3a946ab89f81"
dependencies:
import-meta-resolve "^2.1.0"

View File

@ -3,7 +3,7 @@
"version": "1.0.0",
"private": true,
"devDependencies": {
"@logseq/nbb-logseq": "^1.2.173"
"@logseq/nbb-logseq": "logseq/nbb-logseq#feat-db-v1"
},
"dependencies": {
"better-sqlite3": "8.0.1"

View File

@ -2,8 +2,6 @@
"Primary ns to interact with DB graphs with node.js based CLIs"
(:require [logseq.db.sqlite.db :as sqlite-db]
[logseq.db.sqlite.restore :as sqlite-restore]
[cljs-bean.core :as bean]
[datascript.core :as d]
["fs" :as fs]
["path" :as node-path]))
@ -16,10 +14,4 @@
"Reads a given sqlite db and returns a datascript connection of its contents.
The sqlite db is assumed to have already been opened by sqlite-db/open-db!"
[db-name]
(let [{:keys [uuid->db-id-map conn]}
(sqlite-restore/restore-initial-data (bean/->js (sqlite-db/get-initial-data db-name)))
db (sqlite-restore/restore-other-data
conn
(sqlite-db/get-other-data db-name [])
uuid->db-id-map)]
(d/conn-from-db db)))
(sqlite-restore/restore-initial-data (sqlite-db/get-initial-data db-name)))
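
For orientation, a minimal usage sketch of the simplified CLI API, based on calls that appear elsewhere in this diff (graph name and directory are illustrative):

(require '[logseq.db.sqlite.db :as sqlite-db]
         '[logseq.db.sqlite.cli :as sqlite-cli]
         '[datascript.core :as d])

(sqlite-db/open-db! "tmp/graphs" "test-db")    ; the db must be opened first
(let [conn (sqlite-cli/read-graph "test-db")]  ; conn is a datascript connection
  ;; count the blocks that were restored
  (d/q '[:find (count ?b) .
         :where [?b :block/uuid]]
       @conn))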

View File

@ -4,9 +4,11 @@
["better-sqlite3" :as sqlite3]
[clojure.string :as string]
[logseq.db.sqlite.util :as sqlite-util]
[datascript.storage :refer [IStorage]]
[cljs.cache :as cache]
;; FIXME: datascript.core has to come before datascript.storage or else nbb fails
[datascript.core :as d]
[datascript.storage :refer [IStorage]]
;; Disable until used as it affects nbb
;; [cljs.cache :as cache]
[goog.object :as gobj]
[logseq.db.frontend.schema :as db-schema]
[clojure.edn :as edn]))
@ -92,9 +94,8 @@
(gobj/get "content")))))
(defn sqlite-storage
[repo {:keys [threshold]
:or {threshold 4096}}]
(let [_cache (cache/lru-cache-factory {} :threshold threshold)]
[repo _ #_{:keys [threshold] :or {threshold 4096}}]
(let [_cache nil #_(cache/lru-cache-factory {} :threshold threshold)]
(reify IStorage
(-store [_ addr+data-seq]
(let [data (->>
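
For readers new to datascript's storage protocol: IStorage has two essential methods, -store (persist a batch of [addr data] pairs) and -restore (load one back), matching the reify above. A toy in-memory sketch, assuming datascript ≥ 1.5's :storage option; the atom-backed store is purely illustrative and stands in for the sqlite table used here:

(ns example.mem-storage
  (:require [datascript.core :as d]
            [datascript.storage :refer [IStorage]]))

(defn memory-storage
  []
  (let [store (atom {})]
    (reify IStorage
      (-store [_ addr+data-seq]
        ;; addr+data-seq is a seq of [addr data] pairs to persist
        (swap! store into addr+data-seq))
      (-restore [_ addr]
        (get @store addr)))))

;; index segments of this conn are flushed through the storage
(def conn (d/create-conn {} {:storage (memory-storage)}))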

View File

@ -1,123 +1,9 @@
(ns logseq.db.sqlite.restore
"Fns to restore a sqlite database to a datascript one"
(:require [cognitect.transit :as transit]
[cljs-bean.core :as bean]
[clojure.string :as string]
[datascript.core :as d]
[goog.object :as gobj]
"Fns to restore data from a sqlite database to a datascript one"
(:require [datascript.core :as d]
[logseq.db.frontend.schema :as db-schema]))
(def ^:private t-reader (transit/reader :json))
(defn- uuid-string?
[s]
(and (string? s)
(= (count s) 36)
(string/includes? s "-")))
(defn- eav->datom
[uuid->db-id-map [e a v]]
(let [v' (cond
(and (= :block/uuid a) (string? v))
(uuid v)
(and (coll? v) (= :block/uuid (first v)) (string? (second v)))
(get uuid->db-id-map (second v) v)
:else
v)]
(d/datom e a v')))
(defn restore-other-data
"Given an existing datascript connection and additional sqlite data, returns a
new datascript db with the two combined"
[conn data uuid->db-id-map & [{:keys [init-db-fn] :or {init-db-fn d/init-db}}]]
(let [datoms (transient (set (d/datoms @conn :eavt)))]
(doseq [block data]
(let [uuid (gobj/get block "uuid")
eid (get uuid->db-id-map uuid)
_ (when (nil? eid)
(prn "Error: block without eid ")
(js/console.dir block))
_ (assert eid (str "Can't find eid " eid ", block: " block))
avs (->> (gobj/get block "datoms")
(transit/read t-reader))]
(doseq [[a v] avs]
(when (not (#{:block/uuid :page_uuid} a))
(let [datom (eav->datom uuid->db-id-map [eid a v])]
(conj! datoms datom))))))
(let [all-datoms (persistent! datoms)
new-db (init-db-fn all-datoms db-schema/schema-for-db-based-graph)]
new-db)))
(defn- datoms-str->eav-vec
"Given a block's `datoms` transit string and an associated entity id, returns
a vector of eav triples"
[datoms-str eid]
(->> datoms-str
(transit/read t-reader)
;; Remove :page_uuid as it's a transient attribute used during restore but not in the UI
(remove #(= :page_uuid (first %)))
(mapv (partial apply vector eid))))
(defn- restore-initial-data*
"Builds up most datom vectors including all that are assigned new db ids"
[assign-id-to-uuid-fn all-pages all-blocks init-data]
(let [pages-eav-coll (doall (mapcat (fn [page]
(let [eid (assign-id-to-uuid-fn (:uuid page))]
(datoms-str->eav-vec (:datoms page) eid)))
all-pages))
all-blocks' (doall
(keep (fn [b]
(let [eid (assign-id-to-uuid-fn (:uuid b))]
(when (and (uuid-string? (:uuid b))
(uuid-string? (:page_uuid b)))
[[eid :block/uuid (:uuid b)]
[eid :block/page [:block/uuid (:page_uuid b)]]])))
all-blocks))
init-data' (doall
(keep (fn [b]
(let [eid (assign-id-to-uuid-fn (:uuid b))]
(if (and (uuid-string? (:uuid b))
(= 5 (:type b)))
[[eid :block/uuid (:uuid b)]
[eid :block/unknown? true]]
(datoms-str->eav-vec (:datoms b) eid))))
init-data))]
{:pages-eav-coll pages-eav-coll
:all-blocks' all-blocks'
:init-data' init-data'}))
(defn restore-initial-data
"Given initial sqlite data, returns a datascript connection and other data
needed for subsequent restoration"
[data & [{:keys [conn-from-datoms-fn] :or {conn-from-datoms-fn d/conn-from-datoms}}]]
(let [{:keys [all-pages all-blocks journal-blocks init-data]} (bean/->clj data)
uuid->db-id-tmap (transient (hash-map))
*next-db-id (atom 100001)
assign-id-to-uuid-fn (fn [uuid-str]
(or
(get uuid->db-id-tmap uuid-str)
(let [id @*next-db-id]
(conj! uuid->db-id-tmap [uuid-str id])
(swap! *next-db-id inc)
id)))
{:keys [pages-eav-coll all-blocks' init-data']}
(restore-initial-data* assign-id-to-uuid-fn all-pages all-blocks init-data)
uuid->db-id-map (persistent! uuid->db-id-tmap)
journal-blocks' (mapv
(fn [b]
(let [eid (get uuid->db-id-map (:uuid b))]
(datoms-str->eav-vec (:datoms b) eid)))
journal-blocks)
blocks-eav-colls (->> (concat all-blocks' journal-blocks' init-data')
(apply concat))
all-eav-coll (doall (concat pages-eav-coll blocks-eav-colls))
datoms (map (partial eav->datom uuid->db-id-map)
all-eav-coll)
db-conn (conn-from-datoms-fn datoms db-schema/schema-for-db-based-graph)]
{:conn db-conn
:uuid->db-id-map uuid->db-id-map
:journal-blocks journal-blocks
:datoms-count (count datoms)}))
"Given initial sqlite data, returns a datascript connection"
[datoms]
(d/conn-from-datoms datoms db-schema/schema-for-db-based-graph))
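
With this change the whole restore path collapses to a single conversion. A sketch of the new call sequence, mirroring the updated tests later in this commit:

(require '[logseq.db.sqlite.db :as sqlite-db]
         '[logseq.db.sqlite.restore :as sqlite-restore]
         '[datascript.core :as d])

(def conn (-> (sqlite-db/get-initial-data "test-db")  ; datoms straight from sqlite
              sqlite-restore/restore-initial-data))   ; -> datascript connection

(d/q '[:find ?path ?content
       :where
       [?b :file/path ?path]
       [?b :file/content ?content]]
     @conn)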

View File

@ -2,32 +2,7 @@
"Utils fns for backend sqlite db"
(:require [cljs-time.coerce :as tc]
[cljs-time.core :as t]
[clojure.string :as string]
[cognitect.transit :as transit]
[logseq.db.frontend.schema :as db-schema]))
(defn- type-of-block
"
TODO: use :block/type
FIXME: 4 isn't used. Delete it?
| value | meaning |
|-------+------------------------------------------------|
| 1 | normal block |
| 2 | page block |
| 3 | init data, (config.edn, custom.js, custom.css) |
| 4 | db schema |
| 5 | unknown type |
| 6 | property block |
| 7 | macro |
"
[block]
(cond
(:block/page block) 1
(some #{:file/content :schema/version :db/type} (keys block)) 3
(contains? (:block/type block) "property") 6
(:block/name block) 2
(contains? (set (:block/type block)) "macro") 7
:else 5))
[clojure.string :as string]))
(defonce db-version-prefix "logseq_db_")
@ -36,40 +11,6 @@
[]
(tc/to-long (t/now)))
(defn ds->sqlite-block
"Convert a datascript block to a sqlite map in preparation for a sqlite-db fn"
[b]
{:uuid (str (:block/uuid b))
:type (type-of-block b)
:page_uuid (str (:page_uuid b))
:page_journal_day (:block/journal-day b)
:name (or (:file/path b) (:block/name b))
:content (or (:file/content b) (:block/content b))
:datoms (:datoms b)
:created_at (or (:block/created-at b) (time-ms))
:updated_at (or (:block/updated-at b) (time-ms))})
(defn block-map->datoms-str
"Given a block map and all existing blocks, return the block as transit data
to be stored in the `datoms` column. This is currently only used in testing"
[blocks m]
(let [t-writer (transit/writer :json)]
(->> (dissoc m :db/id)
;; This fn should match pipeline/datom->av-vector
(map (fn m->av-vector [[a v]]
(cond
(contains? db-schema/card-one-ref-type-attributes a)
[a [:block/uuid (str (some #(when (= (:db/id %) (:db/id v)) (:block/uuid %)) blocks))]]
(contains? db-schema/card-many-ref-type-attributes a)
[a (seq
(map (fn [{db-id :db/id}]
[:block/uuid (some #(when (= db-id (:db/id %)) (:block/uuid %)) blocks)])
v))]
:else [a v])))
(transit/write t-writer))))
(defn block-with-timestamps
"Adds updated-at timestamp and created-at if it doesn't exist"
[block]

View File

@ -2,9 +2,9 @@
(:require [cljs.test :refer [deftest async use-fixtures is testing]]
["fs" :as fs]
["path" :as node-path]
[cljs-bean.core :as bean]
[datascript.core :as d]
[logseq.db.sqlite.db :as sqlite-db]
[logseq.db.sqlite.util :as sqlite-util]))
[logseq.db.sqlite.restore :as sqlite-restore]))
(use-fixtures
:each
@ -22,120 +22,17 @@
(fs/mkdirSync (node-path/join dir db-name) #js {:recursive true}))
(deftest get-initial-data
(testing "Fetches file block"
(testing "Fetches a defined block"
(create-graph-dir "tmp/graphs" "test-db")
(sqlite-db/open-db! "tmp/graphs" "test-db")
(let [blocks (mapv sqlite-util/ds->sqlite-block
[{:block/uuid (random-uuid)
:file/path "logseq/config.edn"
:file/content "{:foo :bar}"}])
_ (sqlite-db/upsert-blocks! "test-db" (bean/->js blocks))]
(is (= {:content "{:foo :bar}"
:name "logseq/config.edn"
:type 3}
(-> (sqlite-db/get-initial-data "test-db")
:init-data
bean/->clj
first
(select-keys [:content :name :type])))
"Correct file with content is found"))))
(deftest upsert-blocks!
(let [page-uuid (random-uuid)
block-uuid (random-uuid)
created-at 1688054127299]
(create-graph-dir "tmp/graphs" "test-db")
(sqlite-db/open-db! "tmp/graphs" "test-db")
(testing "Creates a journal block"
(let [blocks (mapv sqlite-util/ds->sqlite-block
[{:block/uuid page-uuid
:block/journal-day 20230629
:block/name "jun 29th, 2023"
:block/created-at created-at
:block/updated-at created-at}
{:block/content "test"
:block/uuid block-uuid
:block/page {:db/id 100022}
:block/created-at created-at
:block/updated-at created-at
:page_uuid page-uuid}])
_ (sqlite-db/upsert-blocks! "test-db" (bean/->js blocks))
db-data (sqlite-db/get-initial-data "test-db")]
(is (= {:uuid (str page-uuid) :page_journal_day 20230629
:name "jun 29th, 2023" :type 2
:created_at created-at}
(-> db-data
:all-pages
first
bean/->clj
(select-keys [:uuid :page_journal_day :type :name :created_at])))
"New journal page is saved")
(is (= {:content "test" :name nil
:uuid (str block-uuid) :type 1
:created_at created-at}
(-> db-data
:journal-blocks
first
bean/->clj
(select-keys [:uuid :type :content :name :created_at])))
"New journal block content is saved")
(is (= [{:uuid (str block-uuid) :page_uuid (str page-uuid)}]
(-> db-data :all-blocks bean/->clj))
"Correct block and page uuid pairs exist")))
(testing "Updates a block"
(let [updated-at 1688072416134
blocks (mapv sqlite-util/ds->sqlite-block
[{:block/uuid page-uuid
:block/journal-day 20230629
:block/name "jun 29th, 2023"
:block/created-at created-at
:block/updated-at updated-at}
{:block/content "test edit"
:block/uuid block-uuid
:block/page {:db/id 100022}
:block/created-at created-at
:block/updated-at updated-at
:page_uuid page-uuid}])
_ (sqlite-db/upsert-blocks! "test-db" (bean/->js blocks))
db-data (sqlite-db/get-initial-data "test-db")]
(is (= {:uuid (str page-uuid) :updated_at updated-at :created_at created-at}
(-> db-data
:all-pages
first
bean/->clj
(select-keys [:uuid :updated_at :created_at])))
"Updated page has correct timestamps")
(is (= {:content "test edit" :created_at created-at :updated_at updated-at}
(-> db-data
:journal-blocks
first
bean/->clj
(select-keys [:content :created_at :updated_at])))
"Updated block has correct content and timestamps")))))
(deftest get-other-data
(testing "Retrieves a normal page block"
(create-graph-dir "tmp/graphs" "test-db")
(sqlite-db/open-db! "tmp/graphs" "test-db")
(let [page-uuid (random-uuid)
block-uuid (random-uuid)
blocks (mapv sqlite-util/ds->sqlite-block
[{:block/uuid page-uuid
:block/name "some page"}
{:block/content "test"
:block/uuid block-uuid
:block/page {:db/id 100022}
:page_uuid page-uuid}])]
(sqlite-db/upsert-blocks! "test-db" (bean/->js blocks))
(is (= {:content "test" :uuid (str block-uuid)
:page_uuid (str page-uuid) :type 1}
(-> (sqlite-db/get-other-data "test-db" [])
bean/->clj
first
(select-keys [:content :page_uuid :type :uuid])))
"New page block is fetched with get-other-data"))))
(let [blocks [{:block/uuid (random-uuid)
:file/path "logseq/config.edn"
:file/content "{:foo :bar}"}]
_ (sqlite-db/transact! "test-db" blocks {})]
(is (= blocks
(->> (sqlite-db/get-initial-data "test-db")
sqlite-restore/restore-initial-data
deref
(d/q '[:find (pull ?b [:block/uuid :file/path :file/content]) :where [?b :file/content]])
(map first)))
"Correct file with content is found"))))

View File

@ -2,11 +2,9 @@
(:require [cljs.test :refer [deftest async use-fixtures is testing]]
["fs" :as fs]
["path" :as node-path]
[cljs-bean.core :as bean]
[datascript.core :as d]
[logseq.db.sqlite.db :as sqlite-db]
[logseq.db.sqlite.restore :as sqlite-restore]
[logseq.db.sqlite.util :as sqlite-util]))
[logseq.db.sqlite.restore :as sqlite-restore]))
(use-fixtures
:each
@ -41,50 +39,13 @@
:block/uuid block-uuid
:block/page {:db/id 100001}
:block/created-at created-at
:block/updated-at created-at
:page_uuid page-uuid}]
blocks (mapv #(sqlite-util/ds->sqlite-block
(assoc % :datoms (sqlite-util/block-map->datoms-str frontend-blocks %)))
frontend-blocks)
_ (sqlite-db/upsert-blocks! "test-db" (bean/->js blocks))
{:keys [conn]} (sqlite-restore/restore-initial-data (bean/->js (sqlite-db/get-initial-data "test-db")))]
(is (= (map #(dissoc % :page_uuid) frontend-blocks)
:block/updated-at created-at}]
_ (sqlite-db/transact! "test-db" frontend-blocks {})
conn (-> (sqlite-db/get-initial-data "test-db")
sqlite-restore/restore-initial-data)]
(is (= frontend-blocks
(->> (d/q '[:find (pull ?b [*])
:where [?b :block/created-at]]
@conn)
(map first)))
"Datascript db matches data inserted into sqlite from simulated frontend"))))
(deftest restore-other-data
(testing "Restore a page with its block"
(create-graph-dir "tmp/graphs" "test-db")
(sqlite-db/open-db! "tmp/graphs" "test-db")
(let [page-uuid (random-uuid)
block-uuid (random-uuid)
created-at (js/Date.now)
frontend-blocks [{:db/id 100001
:block/uuid page-uuid
:block/name "some page"
:block/created-at created-at}
{:db/id 100002
:block/content "test"
:block/uuid block-uuid
:block/page {:db/id 100001}
:page_uuid page-uuid
:block/created-at created-at}]
blocks (mapv #(sqlite-util/ds->sqlite-block
(assoc % :datoms (sqlite-util/block-map->datoms-str frontend-blocks %)))
frontend-blocks)
_ (sqlite-db/upsert-blocks! "test-db" (bean/->js blocks))
{:keys [uuid->db-id-map conn]}
(sqlite-restore/restore-initial-data (bean/->js (sqlite-db/get-initial-data "test-db")))
new-db (sqlite-restore/restore-other-data
conn
(sqlite-db/get-other-data "test-db" [])
uuid->db-id-map)]
(is (= (map #(dissoc % :page_uuid) frontend-blocks)
(->> (d/q '[:find (pull ?b [*])
:where [?b :block/created-at]]
new-db)
(map first)))
"Datascript db matches data inserted into sqlite from simulated frontend"))))

deps/db/yarn.lock vendored
View File

@ -2,10 +2,9 @@
# yarn lockfile v1
"@logseq/nbb-logseq@^1.2.173":
version "1.2.173"
resolved "https://registry.yarnpkg.com/@logseq/nbb-logseq/-/nbb-logseq-1.2.173.tgz#27a52c350f06ac9c337d73687738f6ea8b2fc3f3"
integrity sha512-ABKPtVnSOiS4Zpk9+UTaGcs5H6EUmRADr9FJ0aEAVpa0WfAyvUbX/NgkQGMe1kKRv3EbIuLwaxfy+txr31OtAg==
"@logseq/nbb-logseq@logseq/nbb-logseq#feat-db-v1":
version "1.2.173-feat-db-v1"
resolved "https://codeload.github.com/logseq/nbb-logseq/tar.gz/e4910dfb12043404c97962d8faab3a946ab89f81"
dependencies:
import-meta-resolve "^2.1.0"

View File

@ -3,7 +3,7 @@
"version": "1.0.0",
"private": true,
"devDependencies": {
"@logseq/nbb-logseq": "^1.2.173",
"@logseq/nbb-logseq": "logseq/nbb-logseq#feat-db-v1",
"better-sqlite3": "8.0.1"
},
"dependencies": {

View File

@ -6,8 +6,6 @@
[datascript.core :as d]
[logseq.db.sqlite.db :as sqlite-db]
[logseq.db.sqlite.cli :as sqlite-cli]
[logseq.db.sqlite.util :as sqlite-util]
[cljs-bean.core :as bean]
[logseq.db :as ldb]
[clojure.set :as set]
["fs" :as fs]
@ -115,7 +113,6 @@
(mapv #(merge %
{:db/id (new-db-id)
:block/uuid (random-uuid)
:page_uuid page-uuid
:block/format :markdown
:block/path-refs [{:db/id page-id}]
:block/page {:db/id page-id}
@ -132,10 +129,7 @@
[dir db-name pages-to-blocks]
(sqlite-db/open-db! dir db-name)
(let [frontend-blocks (create-frontend-blocks pages-to-blocks)
blocks (mapv #(sqlite-util/ds->sqlite-block
(assoc % :datoms (sqlite-util/block-map->datoms-str frontend-blocks %)))
frontend-blocks)
_ (sqlite-db/upsert-blocks! db-name (bean/->js blocks))
_ (sqlite-db/transact! db-name frontend-blocks {})
conn (sqlite-cli/read-graph db-name)]
(ldb/create-default-pages! conn {:db-graph? true})
@conn))

View File

@ -2,10 +2,9 @@
# yarn lockfile v1
"@logseq/nbb-logseq@^1.2.173":
version "1.2.173"
resolved "https://registry.yarnpkg.com/@logseq/nbb-logseq/-/nbb-logseq-1.2.173.tgz#27a52c350f06ac9c337d73687738f6ea8b2fc3f3"
integrity sha512-ABKPtVnSOiS4Zpk9+UTaGcs5H6EUmRADr9FJ0aEAVpa0WfAyvUbX/NgkQGMe1kKRv3EbIuLwaxfy+txr31OtAg==
"@logseq/nbb-logseq@logseq/nbb-logseq#feat-db-v1":
version "1.2.173-feat-db-v1"
resolved "https://codeload.github.com/logseq/nbb-logseq/tar.gz/e4910dfb12043404c97962d8faab3a946ab89f81"
dependencies:
import-meta-resolve "^2.1.0"

View File

@ -3,7 +3,7 @@
"version": "1.0.0",
"private": true,
"devDependencies": {
"@logseq/nbb-logseq": "^1.2.173"
"@logseq/nbb-logseq": "logseq/nbb-logseq#feat-db-v1"
},
"dependencies": {
"better-sqlite3": "8.0.1"

View File

@ -5,34 +5,24 @@
* Deleted blocks don't update affected :block/tx-id"
(:require [datascript.core :as d]
[logseq.db.sqlite.db :as sqlite-db]
[logseq.db.sqlite.util :as sqlite-util]
[cljs-bean.core :as bean]
[logseq.outliner.datascript-report :as ds-report]
[logseq.outliner.pipeline :as outliner-pipeline]))
(defn- invoke-hooks
"Modified copy of frontend.modules.outliner.pipeline/invoke-hooks that doesn't
handle :block/tx-id"
[conn {:keys [db-after] :as tx-report}]
[conn tx-report]
(when (not (get-in tx-report [:tx-meta :replace?]))
(let [{:keys [blocks]} (ds-report/get-blocks-and-pages tx-report)
block-path-refs-tx (outliner-pipeline/compute-block-path-refs-tx tx-report blocks)
db-after' (if (seq block-path-refs-tx)
(:db-after (d/transact! conn block-path-refs-tx {:replace? true}))
db-after)
deleted-block-uuids (set (outliner-pipeline/filter-deleted-blocks (:tx-data tx-report)))
upsert-blocks (outliner-pipeline/build-upsert-blocks blocks deleted-block-uuids db-after')]
{:blocks upsert-blocks
:deleted-block-uuids deleted-block-uuids})))
block-path-refs-tx (outliner-pipeline/compute-block-path-refs-tx tx-report blocks)]
(d/transact! conn block-path-refs-tx {:replace? true})
;; frontend also passes original tx-report
tx-report)))
(defn- update-sqlite-db
"Modified copy of :db-transact-data defmethod in electron.handler"
[db-name {:keys [blocks deleted-block-uuids]}]
(when (seq deleted-block-uuids)
(sqlite-db/delete-blocks! db-name deleted-block-uuids))
(when (seq blocks)
(let [blocks' (mapv sqlite-util/ds->sqlite-block blocks)]
(sqlite-db/upsert-blocks! db-name (bean/->js blocks')))))
"Same as :db-transact-data defmethod in electron.handler"
[db-name tx-report]
(sqlite-db/transact! db-name (:tx-data tx-report) (:tx-meta tx-report)))
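
A hypothetical wiring of the two helpers (assuming conn and db-name are in scope; the listener key is illustrative, and the real entry point is the add-listener fn below): every datascript tx-report flows through invoke-hooks and is then mirrored into sqlite.

(d/listen! conn ::persist
           (fn [tx-report]
             (when-let [tx-report' (invoke-hooks conn tx-report)]
               (update-sqlite-db db-name tx-report'))))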
(defn add-listener
"Adds a listener to the datascript connection to persist changes to the given

View File

@ -2,10 +2,9 @@
# yarn lockfile v1
"@logseq/nbb-logseq@^1.2.173":
version "1.2.173"
resolved "https://registry.yarnpkg.com/@logseq/nbb-logseq/-/nbb-logseq-1.2.173.tgz#27a52c350f06ac9c337d73687738f6ea8b2fc3f3"
integrity sha512-ABKPtVnSOiS4Zpk9+UTaGcs5H6EUmRADr9FJ0aEAVpa0WfAyvUbX/NgkQGMe1kKRv3EbIuLwaxfy+txr31OtAg==
"@logseq/nbb-logseq@logseq/nbb-logseq#feat-db-v1":
version "1.2.173-feat-db-v1"
resolved "https://codeload.github.com/logseq/nbb-logseq/tar.gz/e4910dfb12043404c97962d8faab3a946ab89f81"
dependencies:
import-meta-resolve "^2.1.0"

View File

@ -3,7 +3,7 @@
"version": "1.0.0",
"private": true,
"devDependencies": {
"@logseq/nbb-logseq": "^1.2.173",
"@logseq/nbb-logseq": "logseq/nbb-logseq#feat-db-v1",
"mldoc": "^1.5.1"
},
"dependencies": {

View File

@ -2,10 +2,9 @@
# yarn lockfile v1
"@logseq/nbb-logseq@^1.2.173":
version "1.2.173"
resolved "https://registry.yarnpkg.com/@logseq/nbb-logseq/-/nbb-logseq-1.2.173.tgz#27a52c350f06ac9c337d73687738f6ea8b2fc3f3"
integrity sha512-ABKPtVnSOiS4Zpk9+UTaGcs5H6EUmRADr9FJ0aEAVpa0WfAyvUbX/NgkQGMe1kKRv3EbIuLwaxfy+txr31OtAg==
"@logseq/nbb-logseq@logseq/nbb-logseq#feat-db-v1":
version "1.2.173-feat-db-v1"
resolved "https://codeload.github.com/logseq/nbb-logseq/tar.gz/e4910dfb12043404c97962d8faab3a946ab89f81"
dependencies:
import-meta-resolve "^2.1.0"

View File

@ -101,7 +101,7 @@
"@logseq/capacitor-file-sync": "5.0.1",
"@logseq/diff-merge": "0.2.2",
"@logseq/react-tweet-embed": "1.3.1-1",
"@logseq/sqlite-wasm": "^0.0.4",
"@logseq/sqlite-wasm": "=0.0.6",
"@radix-ui/colors": "^0.1.8",
"@sentry/react": "^6.18.2",
"@sentry/tracing": "^6.18.2",

View File

@ -3,11 +3,11 @@
"version": "1.0.0",
"private": true,
"devDependencies": {
"@logseq/nbb-logseq": "^1.2.173"
"@logseq/nbb-logseq": "logseq/nbb-logseq#feat-db-v1"
},
"dependencies": {
"better-sqlite3": "8.0.1",
"fs-extra": "9.1.0",
"mldoc": "^1.5.1",
"better-sqlite3": "8.0.1"
"mldoc": "^1.5.1"
}
}

View File

@ -2,10 +2,9 @@
# yarn lockfile v1
"@logseq/nbb-logseq@^1.2.173":
version "1.2.173"
resolved "https://registry.yarnpkg.com/@logseq/nbb-logseq/-/nbb-logseq-1.2.173.tgz#27a52c350f06ac9c337d73687738f6ea8b2fc3f3"
integrity sha512-ABKPtVnSOiS4Zpk9+UTaGcs5H6EUmRADr9FJ0aEAVpa0WfAyvUbX/NgkQGMe1kKRv3EbIuLwaxfy+txr31OtAg==
"@logseq/nbb-logseq@logseq/nbb-logseq#feat-db-v1":
version "1.2.173-feat-db-v1"
resolved "https://codeload.github.com/logseq/nbb-logseq/tar.gz/e4910dfb12043404c97962d8faab3a946ab89f81"
dependencies:
import-meta-resolve "^2.1.0"

View File

@ -98,9 +98,8 @@
(defn commit!
[message]
(p/do!
(run-git! #js ["config" "core.quotepath" "false"])
(run-git! #js ["commit" "-m" message])))
(p/let [_ (run-git! #js ["config" "core.quotepath" "false"])]
(run-git! #js ["commit" "-m" message])))
(defn add-all-and-commit!
([]

View File

@ -39,8 +39,7 @@
(notification/show! error :error))]
(if (config/db-based-graph? repo)
(->
(p/do!
(persistent-db/<export-db repo {})
(p/let [_ (persistent-db/<export-db repo {})]
(ipc/ipc "persistent-dbs-saved"))
(p/catch error-handler))
;; TODO: Move all file based graphs to use the above persist approach

View File

@ -12,7 +12,8 @@
[clojure.string :as string]
[goog.object :as gobj]
[frontend.components.onboarding.setups :as setups]
[frontend.util.text :as text-util]))
[frontend.util.text :as text-util]
[frontend.util :as util]))
;; Can't name this component as `frontend.components.import` since shadow-cljs
;; will complain about it.
@ -46,7 +47,7 @@
:error))))
(defn- lsq-import-handler
[e & {:keys [sqlite?]}]
[e & {:keys [sqlite? graph-name]}]
(let [file (first (array-seq (.-files (.-target e))))
file-name (some-> (gobj/get file "name")
(string/lower-case))
@ -54,9 +55,7 @@
json? (string/ends-with? file-name ".json")]
(cond
sqlite?
(let [graph-name (-> (js/prompt "Please specify a name for the new graph:")
str
string/trim)
(let [graph-name (string/trim graph-name)
all-graphs (->> (state/get-repos)
(map #(text-util/get-graph-name-from-path (:url %)))
set)]
@ -72,7 +71,8 @@
(set! (.-onload reader)
(fn []
(let [buffer (.-result ^js reader)]
(import-handler/import-from-sqlite-db! buffer graph-name finished-cb))))
(import-handler/import-from-sqlite-db! buffer graph-name finished-cb)
(state/close-modal!))))
(set! (.-onerror reader) (fn [e] (js/console.error e)))
(set! (.-onabort reader) (fn [e]
(prn :debug :aborted)
@ -120,6 +120,30 @@
(notification/show! "Please choose a OPML file."
:error))))
(rum/defcs set-graph-name-dialog
< rum/reactive
(rum/local "" ::input)
[state sqlite-input-e opts]
(let [*input (::input state)
on-submit #(lsq-import-handler sqlite-input-e (assoc opts :graph-name @*input))]
[:div.container
[:div.sm:flex.sm:items-start
[:div.mt-3.text-center.sm:mt-0.sm:text-left
[:h3#modal-headline.leading-6.font-medium
"New graph name:"]]]
[:input.form-input.block.w-full.sm:text-sm.sm:leading-5.my-2.mb-4
{:auto-focus true
:on-change (fn [e]
(reset! *input (util/evalue e)))
:on-key-press (fn [e]
(when (= "Enter" (util/ekey e))
(on-submit)))}]
[:div.mt-5.sm:mt-4.flex
(ui/button "Submit"
{:on-click on-submit})]]))
(rum/defc importer < rum/reactive
[{:keys [query-params]}]
(if (state/sub :graph/importing)
@ -150,7 +174,8 @@
{:id "import-sqlite-db"
:type "file"
:on-change (fn [e]
(lsq-import-handler e {:sqlite? true}))}]]
(state/set-modal!
#(set-graph-name-dialog e {:sqlite? true})))}]]
[:label.action-input.flex.items-center.mx-2.my-2
[:span.as-flex-center [:i (svg/logo 28)]]

View File

@ -71,10 +71,8 @@
(.exportFile ^js pool path))))
(defn- <import-db
[repo data]
(p/let [^js pool (<get-opfs-pool repo)]
(when pool
(.importDb ^js pool (get-repo-path repo) data))))
[^js pool repo data]
(.importDb ^js pool (get-repo-path repo) data))
(defn upsert-addr-content!
"Upsert addr+data-seq"
@ -112,25 +110,24 @@
(-restore [_ addr]
(restore-data-from-addr repo addr)))))
(defn- clean-db!
[repo db search]
(when (or db search)
(swap! *sqlite-conns dissoc repo)
(swap! *datascript-conns dissoc repo)
(.close ^Object db)
(.close ^Object search)))
(defn- close-db-aux!
[repo ^Object db ^Object search]
(swap! *sqlite-conns dissoc repo)
(swap! *datascript-conns dissoc repo)
(swap! *opfs-pools dissoc repo)
(when db (.close db))
(when search (.close search)))
(defn- close-other-dbs!
[repo]
(doseq [[r {:keys [db search]}] @*sqlite-conns]
(when-not (= repo r)
(swap! *opfs-pools dissoc r)
(clean-db! r db search))))
(close-db-aux! r db search))))
(defn- close-db!
[repo]
(let [{:keys [db search]} (@*sqlite-conns repo)]
(clean-db! repo db search)))
(close-db-aux! repo db search)))
(defn- create-or-open-db!
[repo]
@ -187,18 +184,10 @@
_ (p/all (map (fn [file] (.remove file)) files))]
(p/all (map (fn [dir] (.remove dir)) dirs)))))
(defn- <get-pool-files
[^js pool]
(.getFileNames pool))
(defn- remove-vfs!
[repo]
(p/let [^js pool (<get-opfs-pool repo)]
(when pool
(p/let [files (<get-pool-files pool)
_ (p/all (map (fn [file] (.unlink pool file)) files))
_ (.wipeFiles pool)]
(.removeVfs ^js pool)))))
[^js pool]
(when pool
(.removeVfs ^js pool)))
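
The refactor threads the OPFS pool object through explicitly instead of resolving it inside each helper. A hedged sketch of the resulting call shape (method and helper names as they appear in this diff; the pool object comes from @logseq/sqlite-wasm's OPFS pool util):

(p/let [^js pool (<get-opfs-pool repo)]
  (when pool
    (p/let [data (.exportFile pool (get-repo-path repo))]  ; raw db file bytes
      (<import-db pool repo data))))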
(defn- get-search-db
[repo]
@ -234,6 +223,7 @@
(string/replace-first (.-name file) ".logseq-pool-" "")))
all-files)
distinct)]
(prn :debug :all-files (map #(.-name %) all-files))
(prn :debug :all-files-count (count (filter
#(= (.-kind %) "file")
all-files)))
@ -267,9 +257,9 @@
(unsafeUnlinkDB
[_this repo]
(p/let [_ (close-db! repo)
_ (remove-vfs! repo)]
(swap! *opfs-pools dissoc repo)
(p/let [pool (get-opfs-pool repo)
_ (close-db! repo)
result (remove-vfs! pool)]
nil))
(exportDB
@ -280,7 +270,7 @@
[this repo data]
(when-not (string/blank? repo)
(p/let [pool (<get-opfs-pool repo)]
(<import-db repo data))))
(<import-db pool repo data))))
;; Search
(search-blocks

View File

@ -179,9 +179,8 @@
(str (config/get-repo-dir repo) "/" %))
[old-path new-path])
new-dir (path/dirname new-path)]
(p/do!
(mkdir-if-not-exists new-dir)
(protocol/copy! (get-fs old-path) repo old-path new-path)))))
(p/let [_ (mkdir-if-not-exists new-dir)]
(protocol/copy! (get-fs old-path) repo old-path new-path)))))

View File

@ -409,8 +409,7 @@
(get-files [_this dir]
(get-files dir))
(watch-dir! [_this dir _options]
(p/do!
(.unwatch mobile-util/fs-watcher)
(.watch mobile-util/fs-watcher (clj->js {:path dir}))))
(p/let [_ (.unwatch mobile-util/fs-watcher)]
(.watch mobile-util/fs-watcher (clj->js {:path dir}))))
(unwatch-dir! [_this _dir]
(.unwatch mobile-util/fs-watcher)))

View File

@ -99,9 +99,8 @@
(defn await-get-nfs-file-handle
"for accessing File handle outside, ensuring user granted."
[repo handle-path]
(p/do!
(await-permission-granted repo)
(get-nfs-file-handle handle-path)))
(p/let [_ (await-permission-granted repo)]
(get-nfs-file-handle handle-path)))
(defn- readdir-and-reload-all-handles
"Return list of filenames"

View File

@ -173,20 +173,19 @@
db-content (if-not db-empty?
(db/get-file repo file-rpath)
"")]
(p/do!
(cond
(and file-exists?
db-empty?)
(handle-add-and-change! repo file-rpath file-content db-content file-mtime false)
(p/let [_ (cond
(and file-exists?
db-empty?)
(handle-add-and-change! repo file-rpath file-content db-content file-mtime false)
(and file-exists?
(not db-empty?)
(not= file-content db-content))
(handle-add-and-change! repo file-rpath file-content db-content file-mtime true))
(and file-exists?
(not db-empty?)
(not= file-content db-content))
(handle-add-and-change! repo file-rpath file-content db-content file-mtime true))]
(ui-handler/re-render-root!)
(ui-handler/re-render-root!)
[file-rpath]))))))
[file-rpath]))))))
(defn load-graph-files!
"This fn replaces the former initial fs watcher"

View File

@ -77,10 +77,9 @@
(defn restore-and-setup!
[repo repos]
(when repo
(-> (p/do!
(db-restore/restore-graph! repo)
(repo-config-handler/start {:repo repo})
(op-mem-layer/<init-load-from-indexeddb! repo))
(-> (p/let [_ (db-restore/restore-graph! repo)
_ (repo-config-handler/start {:repo repo})]
(op-mem-layer/<init-load-from-indexeddb! repo))
(p/then
(fn []
(db-listener/listen-and-persist! repo)

View File

@ -1879,9 +1879,8 @@
(cond
(and (= content "1. ") (= last-input-char " ") input-id edit-block
(not (own-order-number-list? edit-block)))
(p/do!
(state/pub-event! [:editor/toggle-own-number-list edit-block])
(state/set-edit-content! input-id ""))
(p/let [_ (state/pub-event! [:editor/toggle-own-number-list edit-block])]
(state/set-edit-content! input-id ""))
(and (= last-input-char commands/command-trigger)
(or (re-find #"(?m)^/" (str (.-value input))) (start-of-new-word? input pos)))

View File

@ -5,7 +5,6 @@
[frontend.external :as external]
[frontend.handler.file :as file-handler]
[frontend.handler.repo :as repo-handler]
[frontend.handler.search :as search-handler]
[frontend.state :as state]
[frontend.date :as date]
[frontend.config :as config]
@ -224,12 +223,11 @@
(defn import-from-sqlite-db!
[buffer bare-graph-name finished-ok-handler]
(let [graph (str config/db-version-prefix bare-graph-name)]
(-> (do
(persist-db/<import-db graph buffer)
(state/set-current-repo! graph)
(-> (p/let [_ (persist-db/<import-db graph buffer)]
(repo-handler/restore-and-setup-repo! graph))
(p/then
(fn [_result]
(state/set-current-repo! graph)
(finished-ok-handler)))
(p/catch
(fn [e]

View File

@ -405,13 +405,21 @@
(defn- assign-temp-id
[blocks replace-empty-target? target-block]
(map-indexed (fn [idx block]
;; TODO: block uuid changed, this could be a problem for rtc
(let [replacing-block? (and replace-empty-target? (zero? idx))
db-id (if replacing-block?
(:db/id target-block)
(dec (- idx)))]
(assoc block :db/id db-id))) blocks))
(->> (map-indexed (fn [idx block]
(let [replacing-block? (and replace-empty-target? (zero? idx))]
(if replacing-block?
(let [db-id (or (:db/id block) (dec (- idx)))]
(if (seq (:block/_parent target-block)) ; target-block has children
;; update block properties
[(assoc block
:db/id (:db/id target-block)
:block/uuid (:block/uuid target-block))]
[[:db/retractEntity (:db/id target-block)] ; retract target-block first
(assoc block
:db/id db-id
:block/left (:db/id (:block/left target-block)))]))
[(assoc block :db/id (dec (- idx)))]))) blocks)
(apply concat)))
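
A worked example of the new return shape (values illustrative): one pasted block replacing an empty target that has no children now yields a retraction plus the re-attached block.

;; target-block: {:db/id 42 :block/left {:db/id 7}}, no :block/_parent
;; blocks:       [{:block/content "pasted"}]
;; => (assign-temp-id blocks true target-block)
[[:db/retractEntity 42]        ; drop the empty target first
 {:block/content "pasted"
  :db/id -1                    ; (dec (- 0)) for idx 0
  :block/left 7}]              ; attach to the target's left sibling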
(defn- find-outdented-block-prev-hop
[outdented-block blocks]

View File

@ -4,7 +4,8 @@
[frontend.persist-db.protocol :as protocol]
[promesa.core :as p]
[frontend.state :as state]
[frontend.util :as util]))
[frontend.util :as util]
[frontend.config :as config]))
(defonce opfs-db (browser/->InBrowser))
@ -44,7 +45,9 @@
{:pre [(<= (count repo) 56)]}
(p/do!
(let [current-repo (state/get-current-repo)]
(when (and (util/electron?) (not= repo current-repo))
(when (and (util/electron?)
(not= repo current-repo)
(config/db-based-graph? current-repo))
;; switch graph
(<export-db current-repo {}))
(protocol/<new (get-impl) repo))))

View File

@ -31,9 +31,8 @@
worker (js/Worker. (str worker-url "?electron=" (util/electron?)))
sqlite (Comlink/wrap worker)]
(reset! *sqlite sqlite)
(-> (p/do!
(.init sqlite)
(ask-persist-permission!))
(-> (p/let [_ (.init sqlite)]
(ask-persist-permission!))
(p/catch (fn [error]
(prn :debug "Can't init SQLite wasm")
(js/console.error error)

View File

@ -552,10 +552,10 @@
resolved "https://registry.yarnpkg.com/@logseq/react-tweet-embed/-/react-tweet-embed-1.3.1-1.tgz#119d22be8234de006fc35c3fa2a36f85634c5be6"
integrity sha512-9O0oHs5depCvh6ZQvwtl1xb7B80YG5rUfY10uSUat5itOlcE3IWaYYpe6p/tcHErqHWnWgkXHitAB9M29FMbQg==
"@logseq/sqlite-wasm@^0.0.4":
version "0.0.4"
resolved "https://registry.yarnpkg.com/@logseq/sqlite-wasm/-/sqlite-wasm-0.0.4.tgz#7d9d9fb6034149d503fa66237b1a5f5206107f15"
integrity sha512-aV1nhgUPIrv36E9J7GRKLAFuqPKJbSFxuhrzbxNAhHS8Wn0XO/0++UcikwCOK9aKnbgqfoRgX+VSxiFhwfno2g==
"@logseq/sqlite-wasm@=0.0.6":
version "0.0.6"
resolved "https://registry.yarnpkg.com/@logseq/sqlite-wasm/-/sqlite-wasm-0.0.6.tgz#df44513298e323caa0fea02c4179145df65371e8"
integrity sha512-goo4vnxlq8H/Ea/8zhsEMG+9zGBRMSwazMdYAZyH6WaZbQGsCCoGcsGIgJOJsCDLVnMKqJL5h6WtbbR1JJis6g==
"@mapbox/node-pre-gyp@^1.0.0":
version "1.0.11"