Merge branch 'master' into feat/whiteboards-onboarding

pull/7440/head
Konstantinos Kaloutas 2022-11-28 12:56:41 +02:00
commit ef9075a684
70 changed files with 1180 additions and 634 deletions

View File

@ -35,6 +35,14 @@ jobs:
distribution: 'zulu'
java-version: ${{ env.JAVA_VERSION }}
- name: Cache clojure deps
uses: actions/cache@v2
with:
path: |
~/.m2/repository
~/.gitlibs
key: ${{ runner.os }}-clojure-lib-${{ hashFiles('**/deps.edn') }}
- name: Setup clojure
uses: DeLaGuardo/setup-clojure@3.5
with:
@ -65,3 +73,5 @@ jobs:
APP_STORE_CONNECT_API_KEY_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY_KEY }}
APP_STORE_CONNECT_API_KEY_IS_KEY_CONTENT_BASE64: true
SLACK_URL: ${{ secrets.SLACK_URL }}
MATCH_PASSWORD: ${{ secrets.MATCH_PASSWORD }}
MATCH_GIT_BASIC_AUTHORIZATION: ${{ secrets.MATCH_GIT_BASIC_AUTHORIZATION }}

View File

@ -92,7 +92,7 @@ If you would like to contribute by solving an open issue, please fork this repos
Once you push your code to your fork you'll be able to open a PR into Logseq repository. For more info you can follow this guide from [Github docs](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)
And here a list of some [good firt issues](https://github.com/logseq/logseq/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)!
And here a list of some [good first issues](https://github.com/logseq/logseq/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)!
## Thanks

View File

@ -32,3 +32,5 @@ logseq.graph-parser.property/->block-content
logseq.graph-parser.property/property-value-from-content
;; API
logseq.graph-parser.whiteboard/page-block->tldr-page
;; API
logseq.graph-parser/get-blocks-to-delete

View File

@ -6,11 +6,77 @@
[logseq.graph-parser.util :as gp-util]
[logseq.graph-parser.date-time-util :as date-time-util]
[logseq.graph-parser.config :as gp-config]
[logseq.db.schema :as db-schema]
[clojure.string :as string]
[clojure.set :as set]))
(defn- retract-blocks-tx
[blocks retain-uuids]
(mapcat (fn [{uuid :block/uuid eid :db/id}]
(if (and uuid (contains? retain-uuids uuid))
(map (fn [attr] [:db.fn/retractAttribute eid attr]) db-schema/retract-attributes)
[[:db.fn/retractEntity eid]]))
blocks))
(defn- get-file-page
"Copy of db/get-file-page. Too basic to couple to main app"
[db file-path]
(ffirst
(d/q
'[:find ?page-name
:in $ ?path
:where
[?file :file/path ?path]
[?page :block/file ?file]
[?page :block/original-name ?page-name]]
db
file-path)))
(defn- get-page-blocks-no-cache
"Copy of db/get-page-blocks-no-cache. Too basic to couple to main app"
[db page {:keys [pull-keys]
:or {pull-keys '[*]}}]
(let [sanitized-page (gp-util/page-name-sanity-lc page)
page-id (:db/id (d/entity db [:block/name sanitized-page]))]
(when page-id
(let [datoms (d/datoms db :avet :block/page page-id)
block-eids (mapv :e datoms)]
(d/pull-many db pull-keys block-eids)))))
(defn get-blocks-to-delete
"Returns the transactional operations to retract blocks belonging to the
given page name and file path. This function is required when a file is being
parsed from disk; before saving the parsed file, blocks from the previous version
of that file need to be retracted.
The 'Page' parsed from the new file version is passed separately from the
file-path, as the page name can be set via properties in the file, and thus
can change between versions. If it has changed, existing blocks for both the
old and new page name will be retracted.
Blocks are by default fully cleared via retractEntity. However, a collection
of block UUIDs to retain can be passed, and any blocks with matching uuids
will instead have their attributes cleared individually via
'retractAttribute'. This will preserve block references to the retained
UUIDs."
[db file-page file-path retain-uuid-blocks]
(let [existing-file-page (get-file-page db file-path)
pages-to-clear (distinct (filter some? [existing-file-page (:block/name file-page)]))
blocks (mapcat (fn [page]
(get-page-blocks-no-cache db page {:pull-keys [:db/id :block/uuid]}))
pages-to-clear)
retain-uuids (set (keep :block/uuid retain-uuid-blocks))]
(retract-blocks-tx (distinct blocks) retain-uuids)))
(defn parse-file
"Parse file and save parsed data to the given db. Main parse fn used by logseq app"
"Parse file and save parsed data to the given db. Main parse fn used by logseq app.
Options available:
* :new? - Boolean which indicates if this file already exists. Default is true.
* :delete-blocks-fn - Optional fn which is called with the new page, file and existing block uuids
which may be referenced elsewhere.
* :skip-db-transact? - Boolean which skips transacting in order to batch transactions. Default is false
* :extract-options - Options map to pass to extract/extract"
[conn file content {:keys [new? delete-blocks-fn extract-options skip-db-transact?]
:or {new? true
delete-blocks-fn (constantly [])
@ -31,20 +97,20 @@
blocks []
ast []}}
(cond (contains? gp-config/mldoc-support-formats format)
(extract/extract file content extract-options')
(extract/extract file content extract-options')
(gp-config/whiteboard? file)
(extract/extract-whiteboard-edn file content extract-options')
(gp-config/whiteboard? file)
(extract/extract-whiteboard-edn file content extract-options')
:else nil)
delete-blocks (delete-blocks-fn (first pages) file)
:else nil)
block-ids (map (fn [block] {:block/uuid (:block/uuid block)}) blocks)
delete-blocks (delete-blocks-fn @conn (first pages) file block-ids)
block-refs-ids (->> (mapcat :block/refs blocks)
(filter (fn [ref] (and (vector? ref)
(= :block/uuid (first ref)))))
(map (fn [ref] {:block/uuid (second ref)}))
(seq))
;; To prevent "unique constraint" on datascript
;; To prevent "unique constraint" on datascript
block-ids (set/union (set block-ids) (set block-refs-ids))
pages (extract/with-ref-pages pages blocks)
pages-index (map #(select-keys % [:block/name]) pages)]

View File

@ -49,7 +49,8 @@ TODO: Fail fast when process exits 1"
(mapv
(fn [{:file/keys [path content]}]
(let [{:keys [ast]}
(graph-parser/parse-file conn path content {:extract-options extract-options})]
(graph-parser/parse-file conn path content (merge {:extract-options extract-options}
(:parse-file-options options)))]
{:file path :ast ast}))
files)))
@ -59,12 +60,14 @@ TODO: Fail fast when process exits 1"
as it can't assume that the metadata in logseq/ is up to date. Directory is
assumed to be using git. This fn takes the following options:
* :verbose - When enabled prints more information during parsing. Defaults to true
* :files - Specific files to parse instead of parsing the whole directory"
* :files - Specific files to parse instead of parsing the whole directory
* :conn - Database connection to use instead of creating new one
* :parse-file-options - Options map to pass to graph-parser/parse-file"
([dir]
(parse-graph dir {}))
([dir options]
(let [files (or (:files options) (build-graph-files dir))
conn (ldb/start-conn)
conn (or (:conn options) (ldb/start-conn))
config (read-config dir)
_ (when-not (:files options) (println "Parsing" (count files) "files..."))
asts (parse-files conn files (merge options {:config config}))]

View File

@ -74,7 +74,7 @@
(throw (js/Error "Testing unexpected failure")))]
(try
(graph-parser/parse-file conn "foo.md" "- id:: 628953c1-8d75-49fe-a648-f4c612109098"
{:delete-blocks-fn (fn [page _file]
{:delete-blocks-fn (fn [_db page _file _uuids]
(reset! deleted-page page))})
(catch :default _)))
(is (= nil @deleted-page)

View File

@ -37,11 +37,12 @@ import { IsMac, createRandomPage, newBlock, newInnerBlock, randomString, lastBlo
await page.waitForSelector('[placeholder="Search or create page"]')
await page.fill('[placeholder="Search or create page"]', 'Einführung in die Allgemeine Sprachwissenschaft' + rand)
await page.waitForTimeout(500)
await page.waitForTimeout(2000) // wait longer for search contents to render
const results = await page.$$('#ui__ac-inner>div')
expect(results.length).toEqual(3) // 2 blocks + 1 page
expect(results.length).toBeGreaterThan(3) // 2 blocks + 1 page + 2 page content
await page.keyboard.press("Escape")
await page.keyboard.press("Escape")
await page.waitForTimeout(1000) // wait for modal disappear
})
async function alias_test(page: Page, page_name: string, search_kws: string[]) {

View File

@ -107,5 +107,7 @@
<true/>
<key>UIViewControllerBasedStatusBarAppearance</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>
<false/>
</dict>
</plist>

View File

@ -18,20 +18,29 @@ default_platform(:ios)
platform :ios do
desc "Push a new beta build to TestFlight"
lane :beta do
# Set from env
app_store_connect_api_key
setup_ci
increment_build_number(
app_store_connect_api_key(
key_id: ENV["APP_STORE_CONNECT_API_KEY_KEY_ID"],
issuer_id: ENV["APP_STORE_CONNECT_API_KEY_ISSUER_ID"],
key_filepath: ENV["APP_STORE_CONNECT_API_KEY_KEY_FILEPATH"],
)
sync_code_signing(type: "appstore", readonly: true)
build_number = increment_build_number(
xcodeproj: "App.xcodeproj",
build_number: latest_testflight_build_number + 1,
skip_info_plist: true
)
# Ref: https://docs.fastlane.tools/advanced/fastlane/#directory-behavior
sh("../../../scripts/patch-xcode-project.sh")
build_app(
workspace: "App.xcworkspace",
configuration: "Release",
destination: "generic/platform=iOS",
scheme: "Logseq"
scheme: "Logseq",
configuration: "Release",
)
upload_to_testflight(
@ -39,6 +48,6 @@ platform :ios do
skip_waiting_for_build_processing: true,
)
slack(message: "App successfully uploaded to TestFlight 🎉!")
slack(message: "App Build (#{build_number}) successfully uploaded to TestFlight 🎉!")
end
end

View File

@ -0,0 +1,13 @@
git_url("https://github.com/logseq/certificates.git")
storage_mode("git")
type("appstore") # The default type, can be: appstore, adhoc, enterprise or development
app_identifier(["com.logseq.logseq", "com.logseq.logseq.ShareViewController"])
# username("user@fastlane.tools") # Your Apple Developer Portal username
# For all available options run `fastlane match --help`
# Remove the # in the beginning of the line to enable the other options
# The docs are available on https://docs.fastlane.tools/actions/match

View File

@ -520,7 +520,6 @@ i.ti {
h1.title {
margin-bottom: 1.5rem;
color: var(--ls-title-text-color, #222);
font-family: -apple-system, system-ui, var(--ls-font-family), sans-serif;
font-size: var(--ls-page-title-size, 36px);
font-weight: 500;
}

47
scripts/patch-xcode-project.sh Executable file
View File

@ -0,0 +1,47 @@
#!/bin/bash
# This script patches the iOS project to use the correct codesigning and provisioning profiles.
set -e
set -o pipefail
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
cd ${SCRIPT_DIR}/../ios/App
ls -lah App.xcodeproj/project.pbxproj
FILE="App.xcodeproj/project.pbxproj"
/usr/libexec/PlistBuddy -c 'Set :objects:504EC2FC1FED79650016851F:attributes:TargetAttributes:504EC3031FED79650016851F:ProvisioningStyle Manual' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:504EC2FC1FED79650016851F:attributes:TargetAttributes:5FFF7D6927E343FA00B00DA8:ProvisioningStyle Manual' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:504EC3171FED79650016851F:buildSettings:CODE_SIGN_STYLE Manual' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3171FED79650016851F:buildSettings:"CODE_SIGN_IDENTITY[sdk=iphoneos*]" String "iPhone Distribution"' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:504EC3171FED79650016851F:buildSettings:DEVELOPMENT_TEAM ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3171FED79650016851F:buildSettings:"DEVELOPMENT_TEAM[sdk=iphoneos*]" String K378MFWK59' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3171FED79650016851F:buildSettings:PROVISIONING_PROFILE_SPECIFIER String ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3171FED79650016851F:buildSettings:"PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" String "match AppStore com.logseq.logseq"' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:504EC3181FED79650016851F:buildSettings:CODE_SIGN_STYLE Manual' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3181FED79650016851F:buildSettings:"CODE_SIGN_IDENTITY[sdk=iphoneos*]" String "iPhone Distribution"' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:504EC3181FED79650016851F:buildSettings:DEVELOPMENT_TEAM ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3181FED79650016851F:buildSettings:"DEVELOPMENT_TEAM[sdk=iphoneos*]" String K378MFWK59' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3181FED79650016851F:buildSettings:PROVISIONING_PROFILE_SPECIFIER String ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:504EC3181FED79650016851F:buildSettings:"PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" String "match AppStore com.logseq.logseq"' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:5FFF7D7627E343FA00B00DA8:buildSettings:CODE_SIGN_STYLE Manual' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7627E343FA00B00DA8:buildSettings:"CODE_SIGN_IDENTITY[sdk=iphoneos*]" String "iPhone Distribution"' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:5FFF7D7627E343FA00B00DA8:buildSettings:DEVELOPMENT_TEAM ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7627E343FA00B00DA8:buildSettings:"DEVELOPMENT_TEAM[sdk=iphoneos*]" String K378MFWK59' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7627E343FA00B00DA8:buildSettings:PROVISIONING_PROFILE_SPECIFIER String ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7627E343FA00B00DA8:buildSettings:"PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" String "match AppStore com.logseq.logseq.ShareViewController"' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:5FFF7D7727E343FA00B00DA8:buildSettings:CODE_SIGN_STYLE Manual' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7727E343FA00B00DA8:buildSettings:"CODE_SIGN_IDENTITY[sdk=iphoneos*]" String "iPhone Distribution"' $FILE
/usr/libexec/PlistBuddy -c 'Set :objects:5FFF7D7727E343FA00B00DA8:buildSettings:DEVELOPMENT_TEAM ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7727E343FA00B00DA8:buildSettings:"DEVELOPMENT_TEAM[sdk=iphoneos*]" String K378MFWK59' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7727E343FA00B00DA8:buildSettings:PROVISIONING_PROFILE_SPECIFIER String ""' $FILE
/usr/libexec/PlistBuddy -c 'Add :objects:5FFF7D7727E343FA00B00DA8:buildSettings:"PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" String "match AppStore com.logseq.logseq.ShareViewController"' $FILE
echo Patch OK!

View File

@ -35,7 +35,8 @@
:redef false}}
:closure-defines {goog.debug.LOGGING_ENABLED true
frontend.config/ENABLE-PLUGINS #shadow/env ["ENABLE_PLUGINS" :as :bool :default true]
frontend.config/ENABLE-FILE-SYNC-PRODUCTION #shadow/env ["ENABLE_FILE_SYNC_PRODUCTION" :as :bool :default true]}
frontend.config/ENABLE-FILE-SYNC-PRODUCTION #shadow/env ["ENABLE_FILE_SYNC_PRODUCTION" :as :bool :default true]
frontend.config/TEST #shadow/env ["CI" :as :bool :default false]}
;; NOTE: electron, browser/mobile-app use different asset-paths.
;; For browser/mobile-app devs, assets are located in /static/js(via HTTP root).

View File

@ -285,28 +285,47 @@
(async/put! state/persistent-dbs-chan true)
true)
;; Search related IPCs
(defmethod handle :search-blocks [_window [_ repo q opts]]
(search/search-blocks repo q opts))
(defmethod handle :rebuild-blocks-indice [_window [_ repo data]]
(defmethod handle :search-pages [_window [_ repo q opts]]
(search/search-pages repo q opts))
(defmethod handle :rebuild-indice [_window [_ repo block-data page-data]]
(search/truncate-blocks-table! repo)
;; unneeded serialization
(search/upsert-blocks! repo (bean/->js data))
(search/upsert-blocks! repo (bean/->js block-data))
(search/truncate-pages-table! repo)
(search/upsert-pages! repo (bean/->js page-data))
[])
(defmethod handle :transact-blocks [_window [_ repo data]]
(let [{:keys [blocks-to-remove-set blocks-to-add]} data]
;; Order matters! Same id will delete then upsert sometimes.
(when (seq blocks-to-remove-set)
(search/delete-blocks! repo blocks-to-remove-set))
(when (seq blocks-to-add)
;; unneeded serialization
(search/upsert-blocks! repo (bean/->js blocks-to-add)))))
(defmethod handle :truncate-blocks [_window [_ repo]]
(search/truncate-blocks-table! repo))
(defmethod handle :transact-pages [_window [_ repo data]]
(let [{:keys [pages-to-remove-set pages-to-add]} data]
;; Order matters! Same id will delete then upsert sometimes.
(when (seq pages-to-remove-set)
(search/delete-pages! repo pages-to-remove-set))
(when (seq pages-to-add)
;; unneeded serialization
(search/upsert-pages! repo (bean/->js pages-to-add)))))
(defmethod handle :truncate-indice [_window [_ repo]]
(search/truncate-blocks-table! repo)
(search/truncate-pages-table! repo))
(defmethod handle :remove-db [_window [_ repo]]
(search/delete-db! repo))
;; ^^^^
;; Search related IPCs End
(defn clear-cache!
[window]

View File

@ -1,4 +1,5 @@
(ns electron.search
"Provides both page level and block level index"
(:require ["path" :as path]
["fs-extra" :as fs]
["better-sqlite3" :as sqlite3]
@ -31,25 +32,52 @@
(when db
(.prepare db sql)))
(defn add-triggers!
(defn add-blocks-fts-triggers!
"Table bindings of blocks tables and the blocks FTS virtual tables"
[db]
(let [triggers ["CREATE TRIGGER IF NOT EXISTS blocks_ad AFTER DELETE ON blocks
BEGIN
DELETE from blocks_fts where rowid = old.id;
END;"
(let [triggers [;; add
"CREATE TRIGGER IF NOT EXISTS blocks_ad AFTER DELETE ON blocks
BEGIN
DELETE from blocks_fts where rowid = old.id;
END;"
;; insert
"CREATE TRIGGER IF NOT EXISTS blocks_ai AFTER INSERT ON blocks
BEGIN
INSERT INTO blocks_fts (rowid, uuid, content, page)
VALUES (new.id, new.uuid, new.content, new.page);
END;
"
BEGIN
INSERT INTO blocks_fts (rowid, uuid, content, page)
VALUES (new.id, new.uuid, new.content, new.page);
END;"
;; update
"CREATE TRIGGER IF NOT EXISTS blocks_au AFTER UPDATE ON blocks
BEGIN
DELETE from blocks_fts where rowid = old.id;
INSERT INTO blocks_fts (rowid, uuid, content, page)
VALUES (new.id, new.uuid, new.content, new.page);
END;"
]]
BEGIN
DELETE from blocks_fts where rowid = old.id;
INSERT INTO blocks_fts (rowid, uuid, content, page)
VALUES (new.id, new.uuid, new.content, new.page);
END;"]]
(doseq [trigger triggers]
(let [stmt (prepare db trigger)]
(.run ^object stmt)))))
(defn add-pages-fts-triggers!
"Table bindings of pages tables and the pages FTS virtual tables"
[db]
(let [triggers [;; add
"CREATE TRIGGER IF NOT EXISTS pages_ad AFTER DELETE ON pages
BEGIN
DELETE from pages_fts where rowid = old.id;
END;"
;; insert
"CREATE TRIGGER IF NOT EXISTS pages_ai AFTER INSERT ON pages
BEGIN
INSERT INTO pages_fts (rowid, uuid, content)
VALUES (new.id, new.uuid, new.content);
END;"
;; update
"CREATE TRIGGER IF NOT EXISTS pages_au AFTER UPDATE ON pages
BEGIN
DELETE from pages_fts where rowid = old.id;
INSERT INTO pages_fts (rowid, uuid, content)
VALUES (new.id, new.uuid, new.content);
END;"]]
(doseq [trigger triggers]
(let [stmt (prepare db trigger)]
(.run ^object stmt)))))
@ -68,6 +96,19 @@
(let [stmt (prepare db "CREATE VIRTUAL TABLE IF NOT EXISTS blocks_fts USING fts5(uuid, content, page)")]
(.run ^object stmt)))
(defn create-pages-table!
[db]
(let [stmt (prepare db "CREATE TABLE IF NOT EXISTS pages (
id INTEGER PRIMARY KEY,
uuid TEXT NOT NULL,
content TEXT NOT NULL)")]
(.run ^object stmt)))
(defn create-pages-fts-table!
[db]
(let [stmt (prepare db "CREATE VIRTUAL TABLE IF NOT EXISTS pages_fts USING fts5(uuid, content)")]
(.run ^object stmt)))
(defn get-search-dir
[]
(let [path (.getPath ^object app "userData")]
@ -96,7 +137,10 @@
(try (let [db (sqlite3 db-full-path nil)]
(create-blocks-table! db)
(create-blocks-fts-table! db)
(add-triggers! db)
(create-pages-table! db)
(create-pages-fts-table! db)
(add-blocks-fts-triggers! db)
(add-pages-fts-triggers! db)
(swap! databases assoc db-sanitized-name db))
(catch :default e
(logger/error (str e ": " db-name))
@ -111,6 +155,36 @@
(doseq [db-name dbs]
(open-db! db-name)))))
(defn- clj-list->sql
"Turn clojure list into SQL list
'(1 2 3 4)
->
\"('1','2','3','4')\""
[ids]
(str "(" (->> (map (fn [id] (str "'" id "'")) ids)
(string/join ", ")) ")"))
(defn upsert-pages!
[repo pages]
(if-let [db (get-db repo)]
;; TODO: what if a CONFLICT on uuid
(let [insert (prepare db "INSERT INTO pages (id, uuid, content) VALUES (@id, @uuid, @content) ON CONFLICT (id) DO UPDATE SET content = @content")
insert-many (.transaction ^object db
(fn [pages]
(doseq [page pages]
(.run ^object insert page))))]
(insert-many pages))
(do
(open-db! repo)
(upsert-pages! repo pages))))
(defn delete-pages!
[repo ids]
(when-let [db (get-db repo)]
(let [sql (str "DELETE from pages WHERE id IN " (clj-list->sql ids))
stmt (prepare db sql)]
(.run ^object stmt))))
(defn upsert-blocks!
[repo blocks]
(if-let [db (get-db repo)]
@ -128,9 +202,7 @@
(defn delete-blocks!
[repo ids]
(when-let [db (get-db repo)]
(let [ids (->> (map (fn [id] (str "'" id "'")) ids)
(string/join ", "))
sql (str "DELETE from blocks WHERE id IN (" ids ")")
(let [sql (str "DELETE from blocks WHERE id IN " (clj-list->sql ids))
stmt (prepare db sql)]
(.run ^object stmt))))
@ -150,19 +222,35 @@
(.all ^object stmt input limit))
:keywordize-keys true)))
(defn- get-match-inputs
[q]
(let [match-input (-> q
(string/replace " and " " AND ")
(string/replace " & " " AND ")
(string/replace " or " " OR ")
(string/replace " | " " OR ")
(string/replace " not " " NOT "))]
(if (not= q match-input)
[(string/replace match-input "," "")]
[q
(str "\"" match-input "\"")])))
(defn distinct-by
[f col]
(reduce
(fn [acc x]
(if (some #(= (f x) (f %)) acc)
acc
(vec (conj acc x))))
[]
col))
(defn search-blocks
":page - the page to specifically search on"
[repo q {:keys [limit page]}]
(when-let [database (get-db repo)]
(when-not (string/blank? q)
(let [match-input (-> q
(string/replace " and " " AND ")
(string/replace " & " " AND ")
(string/replace " or " " OR ")
(string/replace " | " " OR ")
(string/replace " not " " NOT "))
match-input (if (not= q match-input)
(string/replace match-input "," "")
(str "\"" match-input "\""))
(let [match-inputs (get-match-inputs q)
non-match-input (str "%" (string/replace q #"\s+" "%") "%")
limit (or limit 20)
select "select rowid, uuid, content, page from blocks_fts where "
@ -172,12 +260,62 @@
" content match ? order by rank limit ?")
non-match-sql (str select
pg-sql
" content like ? limit ?")]
" content like ? limit ?")
matched-result (->>
(map
(fn [match-input]
(search-blocks-aux database match-sql match-input page limit))
match-inputs)
(apply concat))]
(->>
(concat
(search-blocks-aux database match-sql match-input page limit)
(search-blocks-aux database non-match-sql non-match-input page limit))
(distinct)
(concat matched-result
(search-blocks-aux database non-match-sql non-match-input page limit))
(distinct-by :id)
(take limit)
(vec))))))
(defn- search-pages-res-unpack
[arr]
(let [[rowid uuid content snippet] arr]
{:id rowid
:uuid uuid
:content content
:snippet snippet}))
(defn- search-pages-aux
[database sql input limit]
(let [stmt (prepare database sql)]
(map search-pages-res-unpack (-> (.raw ^object stmt)
(.all input limit)
(js->clj)))))
(defn search-pages
[repo q {:keys [limit]}]
(when-let [database (get-db repo)]
(when-not (string/blank? q)
(let [match-inputs (get-match-inputs q)
non-match-input (str "%" (string/replace q #"\s+" "%") "%")
limit (or limit 20)
;; https://www.sqlite.org/fts5.html#the_highlight_function
;; the 2nd column in pages_fts (content)
;; pfts_2lqh is a key for retrieval
;; highlight and snippet only works for some matching with high rank
snippet-aux "snippet(pages_fts, 1, '$pfts_2lqh>$', '$<pfts_2lqh$', '...', 32)"
select (str "select rowid, uuid, content, " snippet-aux " from pages_fts where ")
match-sql (str select
" content match ? order by rank limit ?")
non-match-sql (str select
" content like ? limit ?")
matched-result (->>
(map
(fn [match-input]
(search-pages-aux database match-sql match-input limit))
match-inputs)
(apply concat))]
(->>
(concat matched-result
(search-pages-aux database non-match-sql non-match-input limit))
(distinct-by :id)
(take limit)
(vec))))))
@ -191,6 +329,16 @@
"delete from blocks_fts;")]
(.run ^object stmt))))
(defn truncate-pages-table!
[repo]
(when-let [database (get-db repo)]
(let [stmt (prepare database
"delete from pages;")
_ (.run ^object stmt)
stmt (prepare database
"delete from pages_fts;")]
(.run ^object stmt))))
(defn delete-db!
[repo]
(when-let [database (get-db repo)]
@ -205,9 +353,3 @@
(when-let [database (get-db repo)]
(let [stmt (prepare database sql)]
(.all ^object stmt))))
(comment
(def repo (first (keys @databases)))
(query repo
"select * from blocks_fts")
(delete-db! repo))

View File

@ -158,7 +158,10 @@
(fn [args]
(let [{:keys [url title content page append]} (bean/->clj args)
insert-today? (get-in (state/get-config)
[:quick-capture-options :insert-today]
[:quick-capture-options :insert-today?]
false)
redirect-page? (get-in (state/get-config)
[:quick-capture-options :redirect-page?]
false)
today-page (when (state/enable-journals?)
(string/lower-case (date/today)))
@ -195,7 +198,7 @@
(do
(when (not= page (state/get-current-page))
(page-handler/create! page {:redirect? true}))
(page-handler/create! page {:redirect? redirect-page?}))
(editor-handler/api-insert-new-block! content {:page page
:edit-block? true
:replace-empty-target? true}))))))

View File

@ -385,8 +385,12 @@
share-fn (fn [event]
(util/stop event)
(when (mobile-util/native-platform?)
(.share Share #js {:url path
:title "Open file with your favorite app"})))]
;; File URL must be legal, so filename must be URI-encoded
(let [[rel-dir basename] (util/get-dir-and-basename href)
basename (js/encodeURIComponent basename)
asset-url (str repo-dir rel-dir "/" basename)]
(.share Share (clj->js {:url asset-url
:title "Open file with your favorite app"})))))]
(cond
(contains? config/audio-formats ext)
@ -401,7 +405,7 @@
[:a.asset-ref.is-plaintext {:href (rfe/href :file {:path path})
:on-click (fn [_event]
(p/let [result (fs/read-file repo-dir path)]
(db/set-file-content! repo path result )))}
(db/set-file-content! repo path result)))}
title]
(= ext :pdf)
@ -2493,12 +2497,14 @@
(rum/defc breadcrumb-separator [] [:span.mx-2.opacity-50 "➤"])
(defn breadcrumb
"block-id - uuid of the target block of breadcrumb. page uuid is also acceptable"
[config repo block-id {:keys [show-page? indent? end-separator? level-limit _navigating-block]
:or {show-page? true
level-limit 3}
:as opts}]
(let [parents (db/get-block-parents repo block-id (inc level-limit))
page (db/get-block-page repo block-id)
page (or (db/get-block-page repo block-id) ;; only return for block uuid
(model/query-block-by-uuid block-id)) ;; return page entity when received page uuid
page-name (:block/name page)
page-original-name (:block/original-name page)
show? (or (seq parents) show-page? page-name)

View File

@ -13,6 +13,7 @@
[frontend.context.i18n :refer [t]]
[rum.core :as rum]
[frontend.handler.file-sync :as file-sync-handler]
[frontend.fs.sync :as sync]
[frontend.handler.notification :as notification]))
(defn- ask-for-re-index
@ -28,15 +29,26 @@
(defn- <close-modal-on-done
"Ask users to re-index when the modal is exited"
[sync?]
(async/go (state/close-settings!)
(async/<! (async/timeout 100)) ;; modal race condition requires investigation
(if sync?
(notification/show!
[:div "Please re-index this graph after all the changes are synced."]
:warning
false)
(ask-for-re-index))))
[sync? rename-items]
(async/go
(state/close-modal!)
(async/<! (async/timeout 100)) ;; modal race condition requires investigation
(let [renamed-paths (keep (fn [{:keys [file file-name target]}]
(when (not= file-name target)
(sync/relative-path (:file/path file)))) rename-items)
graph-txid (second @sync/graphs-txid)]
(when (and (seq renamed-paths) sync? graph-txid)
(async/<!
(sync/<delete-remote-files-control
sync/remoteapi
graph-txid
renamed-paths))))
(if sync?
(notification/show!
[:div "Please re-index this graph after all the changes are synced."]
:warning
false)
(ask-for-re-index))))
(rum/defc legacy-warning
[repo *target-format *dir-format *solid-format]
@ -123,10 +135,11 @@
(merge ret {:page page :file file}))))
(remove nil?))
sync? (file-sync-handler/current-graph-sync-on?)
<rename-all #(async/go (doseq [{:keys [file target status]} rename-items]
(when (not= status :unreachable)
(async/<! (p->c (page-handler/rename-file! file target (constantly nil) true)))))
(<close-modal-on-done sync?))]
<rename-all #(async/go
(doseq [{:keys [file target status]} rename-items]
(when (not= status :unreachable)
(async/<! (p->c (page-handler/rename-file! file target (constantly nil) true)))))
(<close-modal-on-done sync? rename-items))]
(if (not-empty rename-items)
[:div ;; Normal UX stage 2: close stage 1 UI, show the action description as admolition
@ -154,12 +167,7 @@
rename-fn #(page-handler/rename-file! file target rm-item-fn)
rename-but [:a {:on-click rename-fn
:title (t :file-rn/apply-rename)}
[:span (t :file-rn/rename src-file-name tgt-file-name)]]
rename-but-sm (ui/button
(t :file-rn/rename-sm)
:on-click rename-fn
:class "text-sm p-1 mr-1"
:style {:word-break "normal"})]
[:span (t :file-rn/rename src-file-name tgt-file-name)]]]
[:tr {:key (:block/name page)}
[:td [:div [:p "📄 " old-title]]
(case status
@ -168,6 +176,5 @@
[:p (t :file-rn/otherwise-breaking) " \"" changed-title \"]]
:unreachable
[:div [:p "🔴 " (t :file-rn/unreachable-title changed-title)]]
[:div [:p "🟢 " (t :file-rn/optional-rename) rename-but]])]
[:td rename-but-sm]]))]]]
[:div [:p "🟢 " (t :file-rn/optional-rename) rename-but]])]]))]]]
[:div "🎉 " (t :file-rn/no-action)]))]]))

View File

@ -76,9 +76,7 @@
(not (contains? #{"Date picker" "Template" "Deadline" "Scheduled" "Upload an image"} command))))]
(editor-handler/insert-command! id command-steps
format
{:restore? restore-slash?})
(state/pub-event! [:instrument {:type :editor/command-triggered
:payload {:command command}}]))))
{:restore? restore-slash?}))))
:class
"black"}))))

View File

@ -264,10 +264,13 @@
(storage/set :ui/file-sync-active-file-list? list-active?)))
[list-active?])
[:div.cp__file-sync-indicator-progress-pane
{:ref *el-ref
:class (when (and syncing? progressing?) "is-progress-active")}
(let [idle-&-no-active? (and idle? no-active-files?)]
(let [idle-&-no-active? (and idle? no-active-files?)
waiting? (not (or (not online?)
idle-&-no-active?
syncing?))]
[:div.cp__file-sync-indicator-progress-pane
{:ref *el-ref
:class (when (and syncing? progressing?) "is-progress-active")}
[:div.a
[:div.al
[:strong
@ -285,30 +288,31 @@
:else "Waiting..."
)]]
[:div.ar
(when queuing? (sync-now))]])
(when queuing? (sync-now))]]
[:div.b.dark:text-gray-200
[:div.bl
[:span.flex.items-center
(if no-active-files?
[:span.opacity-100.pr-1 "Successfully processed"]
[:span.opacity-60.pr-1 "Processed"])]
(when-not waiting?
[:div.b.dark:text-gray-200
[:div.bl
[:span.flex.items-center
(if no-active-files?
[:span.opacity-100.pr-1 "Successfully processed"]
[:span.opacity-60.pr-1 "Processed"])]
(first tip-b&p)]
(first tip-b&p)]
[:div.br
[:small.opacity-50
(when syncing?
(calc-time-left))]]]
[:div.br
[:small.opacity-50
(when syncing?
(calc-time-left))]]])
[:div.c
(second tip-b&p)
(when (or history-files? (not no-active-files?))
[:span.inline-flex.ml-1.active:opacity-50
{:on-click #(set-list-active? (not list-active?))}
(if list-active?
(ui/icon "chevron-up" {:style {:font-size 24}})
(ui/icon "chevron-left" {:style {:font-size 24}}))])]]))
[:div.c
(second tip-b&p)
(when (or history-files? (not no-active-files?))
[:span.inline-flex.ml-1.active:opacity-50
{:on-click #(set-list-active? (not list-active?))}
(if list-active?
(ui/icon "chevron-up" {:style {:font-size 24}})
(ui/icon "chevron-left" {:style {:font-size 24}}))])]])))
(defn- sort-files
[files]
@ -409,7 +413,6 @@
(str "status-of-" (and (keyword? status) (name status)))])}
(when (and (not config/publishing?)
(user-handler/logged-in?))
(ui/dropdown-with-links
;; trigger
(fn [{:keys [toggle-fn]}]
@ -428,21 +431,21 @@
(ui/icon "cloud-off" {:size ui/icon-size})]))
;; links
(cond-> []
(cond-> (vec
(when-not (and no-active-files? idle?)
(cond
need-password?
[{:title [:div.file-item.flex.items-center.leading-none.pt-3
{:style {:margin-left -8}}
(ui/icon "lock" {:size 20}) [:span.pl-1.font-semibold "Password is required"]]
:options {:on-click fs-sync/sync-need-password!}}]
;; head of upcoming sync
(not no-active-files?)
[{:title [:div.file-item.is-first ""]
:options {:class "is-first-placeholder"}}])))
synced-file-graph?
(concat
(when-not (and no-active-files? idle?)
(cond
need-password?
[{:title [:div.file-item
(ui/icon "lock") "Password is required"]
:options {:on-click fs-sync/sync-need-password!}}]
;; head of upcoming sync
(not no-active-files?)
[{:title [:div.file-item.is-first ""]
:options {:class "is-first-placeholder"}}]))
(map (fn [f] {:title [:div.file-item
{:key (str "downloading-" f)}
(gp-util/safe-decode-uri-component f)]

View File

@ -196,6 +196,10 @@
.ti {
@apply translate-y-0;
&.ls-icon-thumb-up {
@apply translate-x-[0.5px];
}
}
> .a {
@ -464,7 +468,7 @@
&.strength-wrap {
@apply flex-wrap;
}
.strength-item {
@apply whitespace-nowrap flex items-center leading-none opacity-60;

View File

@ -118,7 +118,7 @@
(t :open-a-directory)
:on-click #(state/pub-event! [:graph/setup-a-repo]))])]]
(when (seq remote-graphs)
(when (and (file-sync/enable-sync?) login?)
[:div
[:hr]
[:div.flex.align-items.justify-between
@ -194,7 +194,8 @@
[state]
(let [multiple-windows? (::electron-multiple-windows? state)
current-repo (state/sub :git/current-repo)
login? (boolean (state/sub :auth/id-token))]
login? (boolean (state/sub :auth/id-token))
remotes-loading? (state/sub [:file-sync/remote-graphs :loading])]
(when (or login? current-repo)
(let [repos (state/sub [:me :repos])
remotes (state/sub [:file-sync/remote-graphs :graphs])
@ -229,7 +230,13 @@
:modal-class (util/hiccup->class
"origin-top-right.absolute.left-0.mt-2.rounded-md.shadow-lg")}
(> (count repos) 1) ; show switch to if there are multiple repos
(assoc :links-header [:div.font-medium.text-sm.opacity-60.px-4.pt-2.pb-1
"Switch to:"]))]
(assoc :links-header [:div.font-medium.text-sm.opacity-70.px-4.pt-2.pb-1.flex.flex-row.justify-between.items-center
[:div "Switch to:"]
(when (and (file-sync/enable-sync?) login?)
(if remotes-loading?
(ui/loading "")
[:a.flex {:title "Refresh remote graphs"
:on-click file-sync/load-session-graphs}
(ui/icon "refresh")]))]))]
(when (seq repos)
(ui/dropdown-with-links render-content links links-header))))))

View File

@ -22,7 +22,8 @@
[frontend.context.i18n :refer [t]]
[frontend.date :as date]
[reitit.frontend.easy :as rfe]
[frontend.modules.shortcut.core :as shortcut]))
[frontend.modules.shortcut.core :as shortcut]
[frontend.util.text :as text-util]))
(defn highlight-exact-query
[content q]
@ -62,12 +63,43 @@
(conj result [:span content])))]
[:p {:class "m-0"} elements]))))))
(defn highlight-page-content-query
"Return hiccup of highlighted page content FTS result"
[content q]
(when-not (or (string/blank? content) (string/blank? q))
[:div (loop [content content ;; why recur? because there might be multiple matches
result []]
(let [[b-cut hl-cut e-cut] (text-util/cut-by content "$pfts_2lqh>$" "$<pfts_2lqh$")
hiccups-add [(when-not (string/blank? b-cut)
[:span b-cut])
(when-not (string/blank? hl-cut)
[:mark.p-0.rounded-none hl-cut])]
hiccups-add (remove nil? hiccups-add)
new-result (concat result hiccups-add)]
(if-not (string/blank? e-cut)
(recur e-cut new-result)
new-result)))]))
(rum/defc search-result-item
[icon content]
[:.search-result
(ui/type-icon icon)
[:.self-center content]])
(rum/defc page-content-search-result-item
[repo uuid format snippet q search-mode]
[:div
(when (not= search-mode :page)
[:div {:class "mb-1" :key "parents"}
(block/breadcrumb {:id "block-search-block-parent"
:block? true
:search? true}
repo
(clojure.core/uuid uuid)
{:indent? false})])
[:div {:class "font-medium" :key "content"}
(highlight-page-content-query (search-handler/sanity-search-content format snippet) q)]])
(rum/defc block-search-result-item
[repo uuid format content q search-mode]
(let [content (search-handler/sanity-search-content format content)]
@ -157,6 +189,21 @@
(println "[Error] Block page missing: "
{:block-id block-uuid
:block (db/pull [:block/uuid block-uuid])})))
:page-content
(let [page-uuid (uuid (:block/uuid data))
page (model/get-block-by-uuid page-uuid)
page-name (:block/name page)]
(if page
(cond
(model/whiteboard-page? page-name)
(route/redirect-to-whiteboard! page-name)
:else
(route/redirect-to-page! page-name))
;; search indice outdated
(println "[Error] page missing: "
{:page-uuid page-uuid
:page page})))
nil)
(state/close-modal!))
@ -172,6 +219,19 @@
repo
(:db/id page)
:page)))
:page-content
(let [page-uuid (uuid (:block/uuid data))
page (model/get-block-by-uuid page-uuid)]
(if page
(state/sidebar-add-block!
repo
(:db/id page)
:page)
;; search indice outdated
(println "[Error] page missing: "
{:page-uuid page-uuid
:page page})))
:block
(let [block-uuid (uuid (:block/uuid data))
@ -254,10 +314,24 @@
(do (log/error "search result with non-existing uuid: " data)
(str "Cache is outdated. Please click the 'Re-index' button in the graph's dropdown menu."))))])
:page-content
(let [{:block/keys [snippet uuid]} data ;; content here is normalized
repo (state/sub :git/current-repo)
page (model/query-block-by-uuid uuid) ;; it's actually a page
format (db/get-page-format page)]
[:span {:data-block-ref uuid}
(search-result-item {:name "page"
:title (t :search-item/page)
:extension? true}
(if page
(page-content-search-result-item repo uuid format snippet search-q search-mode)
(do (log/error "search result with non-existing uuid: " data)
(str "Cache is outdated. Please click the 'Re-index' button in the graph's dropdown menu."))))])
nil)]))
(rum/defc search-auto-complete
[{:keys [engine pages files blocks has-more?] :as result} search-q all?]
[{:keys [engine pages files pages-content blocks has-more?] :as result} search-q all?]
(let [pages (when-not all? (map (fn [page]
(let [alias (model/get-redirect-page-name page)]
(cond->
@ -270,6 +344,7 @@
(remove nil? pages)))
files (when-not all? (map (fn [file] {:type :file :data file}) files))
blocks (map (fn [block] {:type :block :data block}) blocks)
pages-content (map (fn [pages-content] {:type :page-content :data pages-content}) pages-content)
search-mode (state/sub :search/mode)
new-page (if (or
(some? engine)
@ -284,13 +359,13 @@
[{:type :new-page}]))
result (cond
config/publishing?
(concat pages files blocks)
(concat pages files blocks) ;; Browser doesn't have page content FTS
(= :whiteboard/link search-mode)
(concat pages blocks)
(concat pages blocks pages-content)
:else
(concat new-page pages files blocks))
(concat new-page pages files blocks pages-content))
result (if (= search-mode :graph)
[{:type :graph-add-filter}]
result)

View File

@ -18,7 +18,8 @@
(reset! state/publishing? publishing?)
(def test? false)
(goog-define TEST false)
(def test? TEST)
(goog-define ENABLE-FILE-SYNC-PRODUCTION false)
@ -66,11 +67,6 @@
"http://localhost:3000"
(util/format "https://%s.com" app-name)))
(def api
(if dev?
"http://localhost:3000/api/v1/"
(str website "/api/v1/")))
(def asset-domain (util/format "https://asset.%s.com"
app-name))
@ -112,7 +108,8 @@
([input] (extname-of-supported?
input
[image-formats doc-formats audio-formats
video-formats markup-formats html-render-formats]))
video-formats markup-formats html-render-formats
(gp-config/text-formats)]))
([input formats]
(when-let [input (some->
(cond-> input
@ -131,7 +128,7 @@
*** Warning!!! ***
For UX logic only! Don't use for FS logic
iPad / Android Pad doesn't trigger!
Same as config/mobile?"
(when-not util/node-test?
(util/safe-re-find #"Mobi" js/navigator.userAgent)))

View File

@ -38,13 +38,13 @@
[frontend.db.model
blocks-count blocks-count-cache clean-export! delete-blocks get-pre-block
delete-file-blocks! delete-page-blocks delete-files delete-pages-by-files
delete-files delete-pages-by-files
filter-only-public-pages-and-blocks get-all-block-contents get-all-tagged-pages
get-all-templates get-block-and-children get-block-by-uuid get-block-children sort-by-left
get-block-parent get-block-parents parents-collapsed? get-block-referenced-blocks get-all-referenced-blocks-uuid
get-block-children-ids get-block-immediate-children get-block-page
get-custom-css get-date-scheduled-or-deadlines
get-file-blocks get-file-last-modified-at get-file get-file-page get-file-page-id file-exists?
get-file-last-modified-at get-file get-file-page get-file-page-id file-exists?
get-files get-files-blocks get-files-full get-journals-length get-pages-with-file
get-latest-journals get-page get-page-alias get-page-alias-names get-paginated-blocks
get-page-blocks-count get-page-blocks-no-cache get-page-file get-page-format get-page-properties

View File

@ -212,17 +212,6 @@
(conn/get-db repo-url) pred)
db-utils/seq-flatten)))
(defn get-file-blocks
[repo-url path]
(-> (d/q '[:find ?block
:in $ ?path
:where
[?file :file/path ?path]
[?p :block/file ?file]
[?block :block/page ?p]]
(conn/get-db repo-url) path)
db-utils/seq-flatten))
(defn set-file-last-modified-at!
[repo path last-modified-at]
(when (and repo path last-modified-at)
@ -274,6 +263,7 @@
(db-utils/entity [:block/uuid (if (uuid? id) id (uuid id))]))
(defn query-block-by-uuid
"Return block or page entity, depends on the uuid"
[id]
(db-utils/pull [:block/uuid (if (uuid? id) id (uuid id))]))
@ -785,6 +775,8 @@
react)))))
(defn get-page-blocks-no-cache
"Return blocks of the designated page, without using cache.
page - name / title of the page"
([page]
(get-page-blocks-no-cache (state/get-current-repo) page nil))
([repo-url page]
@ -1528,6 +1520,7 @@
assets (get-assets datoms)]
[@(d/conn-from-datoms datoms db-schema/schema) assets]))))
;; Deprecated?
(defn delete-blocks
[repo-url files _delete-page?]
(when (seq files)
@ -1538,21 +1531,6 @@
[files]
(mapv (fn [path] [:db.fn/retractEntity [:file/path path]]) files))
(defn delete-file-blocks!
[repo-url path]
(let [blocks (get-file-blocks repo-url path)]
(mapv (fn [eid] [:db.fn/retractEntity eid]) blocks)))
(defn delete-page-blocks
[repo-url page]
(when page
(when-let [db (conn/get-db repo-url)]
(let [page (db-utils/pull [:block/name (util/page-name-sanity-lc page)])]
(when page
(let [datoms (d/datoms db :avet :block/page (:db/id page))
block-eids (mapv :e datoms)]
(mapv (fn [eid] [:db.fn/retractEntity eid]) block-eids)))))))
(defn delete-pages-by-files
[files]
(let [pages (->> (mapv get-file-page files)

View File

@ -137,7 +137,6 @@
:file-rn/all-action "Apply all Actions!"
:file-rn/select-format "(Developer Mode Option, Dangerous!) Select filename format"
:file-rn/rename "rename file \"{1}\" to \"{2}\""
:file-rn/rename-sm "Rename"
:file-rn/apply-rename "Apply the file rename operation"
:file-rn/affected-pages "Affected Pages after the format change"
:file-rn/suggest-rename "Action required: "
@ -1278,7 +1277,6 @@
:file-rn/otherwise-breaking "Ou le titre deviendra"
:file-rn/re-index "La réindexation est fortement recommandée après que les fichiers aient été renommés, puis sur les autres postes après synchronisation."
:file-rn/rename "renommer le fichier \"{1}\" en \"{2}\""
:file-rn/rename-sm "Renommer"
:file-rn/select-confirm-proceed "Dev: format d'écriture"
:file-rn/select-format "(Option du Mode Developpeur, Danger !) Sélectionnez le format de nom de fichier"
:file-rn/suggest-rename "Action requise : "
@ -1306,7 +1304,7 @@
:left-side-bar/new-whiteboard "Nouveau tableau blanc"
:linked-references/filter-search "Rechercher dans les pages liées"
:on-boarding/add-graph "Ajouter un graphe"
:on-boarding/demo-graph "Il s'agit d'un graphe de démo, les changements ne seront pas enregistrés à moins que vous n'ouvrir un dossier local."
:on-boarding/demo-graph "Il s'agit d'un graphe de démo, les changements ne seront pas enregistrés à moins que vous n'ouvriez un dossier local."
:on-boarding/new-graph-desc-1 "Logseq supporte à la fois le Markdown et l'Org-mode. Vous pouvez ouvrir un dossier existant ou en créer un nouveau sur cet appareil. Vos données seront enregistrées uniquement sur cet appareil."
:on-boarding/new-graph-desc-2 "Après avoir ouvert votre dossier, cela créera 3 sous-dossiers :"
:on-boarding/new-graph-desc-3 "/journals - contient vos pages du journal"
@ -1414,8 +1412,8 @@
:settings-page/enable-tooltip "Astuces"
:settings-page/enable-whiteboards "Tableaux blancs"
:settings-page/export-theme "Exporter le theme"
:settings-page/filename-format "Format de nm de fichier"
:settings-page/git-commit-delay "Délai (secondes) des commit Git automatiques"
:settings-page/filename-format "Format de nom de fichier"
:settings-page/git-commit-delay "Délai (secondes) des commits Git automatiques"
:settings-page/git-confirm "Vous devez redémarrer l'application après avoir mis à jour le dossier Git"
:settings-page/git-desc "est utilisé pour gérer les versions de pages, vous pouvez cliquer sur..."
:settings-page/git-switcher-label "Activer les commits Git automatiques"
@ -1426,9 +1424,9 @@
:settings-page/preferred-outdenting "Mise en retrait logique"
:settings-page/shortcut-settings "Personnaliser les raccourcis"
:settings-page/show-brackets "Montrer les parenthèses, crochets et accolades"
:settings-page/spell-checker "Vérification autographique"
:settings-page/spell-checker "Vérification orthographique"
:settings-page/sync "Synchronisation"
:settings-page/tab-advanced "Advancé"
:settings-page/tab-advanced "Avancé"
:settings-page/tab-assets "Pièces-jointes"
:settings-page/tab-editor "Éditeur"
:settings-page/tab-features "Fonctionnalités"
@ -1590,7 +1588,6 @@
:file-rn/all-action "应用所有操作!"
:file-rn/select-format "(开发者模式选项,危险!) 选择文件名格式"
:file-rn/rename "重命名文件 \"{1}\" 到 \"{2}\""
:file-rn/rename-sm "重命名"
:file-rn/apply-rename "应用文件重命名操作"
:file-rn/affected-pages "格式改变后,影响的文件"
:file-rn/suggest-rename "需要的操作: "
@ -2275,7 +2272,6 @@
:file-rn/all-action "¡Aplicar todas las acciones!"
:file-rn/select-format "(Opción modo desarrollador, ¡peligroso!) Seccione el formato de nombre de archivo"
:file-rn/rename "Renombrar \"{1}\" a \"{2}\""
:file-rn/rename-sm "Renombrar"
:file-rn/apply-rename "Aplicar la operación de cambio de nombre de archivo"
:file-rn/affected-pages "Páginas afectadas después del cambio de formato"
:file-rn/suggest-rename "Acción necesaria: "
@ -4625,7 +4621,6 @@
:file-rn/all-action "Tüm Eylemleri Uygula!"
:file-rn/select-format "(Geliştirici Modu Seçeneği, Tehlikeli!) Dosya adı biçimini seçin"
:file-rn/rename "\"{1}\" dosyasını \"{2}\" olarak yeniden adlandır"
:file-rn/rename-sm "Yeniden adlandır"
:file-rn/apply-rename "Dosya yeniden adlandırma işlemini uygula"
:file-rn/affected-pages "Biçim değişikliğinden sonra Etkilenen Sayfalar"
:file-rn/suggest-rename "Eylem gereklidir: "

View File

@ -519,6 +519,8 @@
[:span.nu.flex.items-center.opacity-70
[:input {:ref *page-ref
:type "number"
:min 1
:max total-page-num
:class (util/classnames [{:is-long (> (util/safe-parse-int current-page-num) 999)}])
:default-value current-page-num
:on-mouse-enter #(.select ^js (.-target %))
@ -553,4 +555,4 @@
viewer-theme
{:t t
:hide-settings! #(set-settings-visible! false)
:select-theme! #(set-viewer-theme! %)}))]))
:select-theme! #(set-viewer-theme! %)}))]))

View File

@ -82,13 +82,11 @@
(p/let [opts (assoc opts
:error-handler
(fn [error]
(state/pub-event! [:instrument {:type :write-file/failed
:payload {:fs (type fs-record)
:user-agent (when js/navigator js/navigator.userAgent)
:path path
:content-length (count content)
:error-str (str error)
:error error}}])))
(state/pub-event! [:capture-error {:error error
:payload {:type :write-file/failed
:fs (type fs-record)
:user-agent (when js/navigator js/navigator.userAgent)
:content-length (count content)}}])))
_ (protocol/write-file! (get-fs dir) repo dir path content opts)]
(when (= bfs-record fs-record)
(db/set-file-last-modified-at! repo (config/get-file-path repo path) (js/Date.))))

View File

@ -229,9 +229,9 @@
:else
(do
(state/pub-event! [:instrument {:type :error/ios-path-missing-slashes
;; respect user's privacy
:path (gp-util/safe-subs path 10)}])
(state/pub-event! [:capture-error {:error (js/Error. "ios path missing slashes")
:payload {:type :error/ios-path-missing-slashes
:path (gp-util/safe-subs (str path) 12)}}])
path))
path))

View File

@ -494,7 +494,8 @@
reserved-paths (filter f paths)]
(when (seq reserved-paths)
(let [paths (if path-string? reserved-paths (map -relative-path reserved-paths))]
(state/pub-event! [:ui/notify-outdated-filename-format paths])
(when (seq paths)
(state/pub-event! [:ui/notify-outdated-filename-format paths]))
(prn "Skipped uploading those file paths with reserved chars: " paths)))
(vec (remove f paths))))
@ -750,7 +751,6 @@
(when (some-> r first :path (not= filepath))
(-> r first :path)))))
(defn <local-file-not-exist?
[graph-uuid irsapi base-path filepath]
(go
@ -778,6 +778,22 @@
(declare <rsapi-cancel-all-requests)
(defn- build-local-file-metadatas
[this graph-uuid result]
(loop [[[path metadata] & others] (js->clj result)
result #{}]
(if-not (and path metadata)
;; finish
result
(let [normalized-path (path-normalize path)
encryptedFname (if (not= path normalized-path)
(first (<! (<encrypt-fnames this graph-uuid [normalized-path])))
(get metadata "encryptedFname"))]
(recur others
(conj result
(->FileMetadata (get metadata "size") (get metadata "md5") normalized-path
encryptedFname (get metadata "mtime") false nil)))))))
(deftype RSAPI [^:mutable graph-uuid' ^:mutable private-key' ^:mutable public-key']
IToken
(<get-token [_this]
@ -795,26 +811,17 @@
(set! private-key' private-key)
(set! public-key' public-key)
(p->c (ipc/ipc "set-env" graph-uuid (if prod? "prod" "dev") private-key public-key)))
(<get-local-all-files-meta [_ graph-uuid base-path]
(<get-local-all-files-meta [this graph-uuid base-path]
(go
(let [r (<! (<retry-rsapi #(p->c (ipc/ipc "get-local-all-files-meta" graph-uuid base-path))))]
(if (instance? ExceptionInfo r)
r
(->> r
js->clj
(map (fn [[path metadata]]
(->FileMetadata (get metadata "size") (get metadata "md5") (path-normalize path)
(get metadata "encryptedFname") (get metadata "mtime") false nil)))
set)))))
(<get-local-files-meta [_ graph-uuid base-path filepaths]
(build-local-file-metadatas this graph-uuid r)))))
(<get-local-files-meta [this graph-uuid base-path filepaths]
(go
(let [r (<! (<retry-rsapi #(p->c (ipc/ipc "get-local-files-meta" graph-uuid base-path filepaths))))]
(assert (not (instance? ExceptionInfo r)) "get-local-files-meta shouldn't return exception")
(->> r
js->clj
(map (fn [[path metadata]]
(->FileMetadata (get metadata "size") (get metadata "md5") (path-normalize path)
(get metadata "encryptedFname") (get metadata "mtime") false nil)))))))
(build-local-file-metadatas this graph-uuid r))))
(<rename-local-file [_ graph-uuid base-path from to]
(<retry-rsapi #(p->c (ipc/ipc "rename-local-file" graph-uuid base-path
(path-normalize from)
@ -887,36 +894,22 @@
:secretKey secret-key
:publicKey public-key}))))
(<get-local-all-files-meta [_ graph-uuid base-path]
(<get-local-all-files-meta [this graph-uuid base-path]
(go
(let [r (<! (p->c (.getLocalAllFilesMeta mobile-util/file-sync (clj->js {:graphUUID graph-uuid
:basePath base-path}))))]
(if (instance? ExceptionInfo r)
r
(->> (.-result r)
js->clj
(map (fn [[path metadata]]
(->FileMetadata (get metadata "size") (get metadata "md5")
;; return decoded path, keep it consistent with RSAPI
(path-normalize path)
(get metadata "encryptedFname") (get metadata "mtime") false nil)))
set)))))
(build-local-file-metadatas this graph-uuid (.-result r))))))
(<get-local-files-meta [_ graph-uuid base-path filepaths]
(<get-local-files-meta [this graph-uuid base-path filepaths]
(go
(let [r (<! (p->c (.getLocalFilesMeta mobile-util/file-sync
(clj->js {:graphUUID graph-uuid
:basePath base-path
:filePaths filepaths}))))]
(assert (not (instance? ExceptionInfo r)) "get-local-files-meta shouldn't return exception")
(->> (.-result r)
js->clj
(map (fn [[path metadata]]
(->FileMetadata (get metadata "size") (get metadata "md5")
;; return decoded path, keep it consistent with RSAPI
(path-normalize path)
(get metadata "encryptedFname") (get metadata "mtime") false nil)))
set))))
(build-local-file-metadatas this graph-uuid (.-result r)))))
(<rename-local-file [_ graph-uuid base-path from to]
(p->c (.renameLocalFile mobile-util/file-sync
@ -1175,10 +1168,7 @@
(loop [[raw-path & other-paths] raw-paths]
(when raw-path
(let [normalized-path (path-normalize raw-path)]
(when (and (not= normalized-path raw-path)
(get path->encrypted-path-map normalized-path))
;; raw-path is un-normalized path and there are related normalized version one,
;; then filter out this raw-path
(when (not= normalized-path raw-path)
(println :filter-files-with-unnormalized-path raw-path)
(conj! *encrypted-paths-to-drop (get path->encrypted-path-map raw-path))))
(recur other-paths)))
@ -1228,7 +1218,7 @@
(mapv
#(->FileMetadata (:size %)
(:checksum %)
(path-normalize (get encrypted-path->path-map (:encrypted-path %)))
(get encrypted-path->path-map (:encrypted-path %))
(:encrypted-path %)
(:last-modified %)
true nil)
@ -1370,9 +1360,12 @@
IRemoteControlAPI
(<delete-remote-files-control [this graph-uuid filepaths]
(user/<wrap-ensure-id&access-token
(let [current-txid (:TXId (<! (<get-remote-graph this nil graph-uuid)))
files (<! (<encrypt-fnames rsapi graph-uuid filepaths))]
(<! (.<request this "delete_files" {:GraphUUID graph-uuid :TXId current-txid :Files files}))))))
(let [partitioned-files (partition-all 20 (<! (<encrypt-fnames rsapi graph-uuid filepaths)))]
(loop [[files & others] partitioned-files]
(when files
(let [current-txid (:TXId (<! (<get-remote-graph this nil graph-uuid)))]
(<! (.<request this "delete_files" {:GraphUUID graph-uuid :TXId current-txid :Files files}))
(recur others))))))))
(comment
(declare remoteapi)
@ -1753,7 +1746,6 @@
(<! (<get-local-files-meta
rsapi (:current-syncing-graph-uuid sync-state) dir [path])))
checksum (and (coll? files-meta) (some-> files-meta first :etag))]
(println :files-watch (->FileChangeEvent type dir path stat checksum))
(>! local-changes-chan (->FileChangeEvent type dir path stat checksum))))))))))
(defn local-changes-revised-chan-builder
@ -1782,7 +1774,9 @@
;; add 2 simulated file-watcher events
(>! ch (->FileChangeEvent "unlink" repo-dir (:old-path rename-event*) nil nil))
(>! ch (->FileChangeEvent "add" repo-dir (:new-path rename-event*)
{:mtime (tc/to-long (t/now))} "fake-checksum"))
{:mtime (tc/to-long (t/now))
:size 1 ; add a fake size
} "fake-checksum"))
(recur))
local-change
(cond
@ -2023,11 +2017,6 @@
(chan 1))
(def full-sync-mult (async/mult full-sync-chan))
(def stop-sync-chan
"offer `true` to this chan will stop current `SyncManager`"
(chan 1))
(def stop-sync-mult (async/mult stop-sync-chan))
(def remote->local-sync-chan
"offer `true` to this chan will trigger a remote->local sync"
(chan 1))
@ -2659,7 +2648,7 @@
*txid ^:mutable state ^:mutable remote-change-chan ^:mutable *ws *stopped? *paused?
^:mutable ops-chan
;; control chans
private-full-sync-chan private-stop-sync-chan private-remote->local-sync-chan
private-full-sync-chan private-remote->local-sync-chan
private-remote->local-full-sync-chan private-pause-resume-chan]
Object
(schedule [this next-state args reason]
@ -2694,30 +2683,30 @@
(set! ratelimit-local-changes-chan (<ratelimit local->remote-syncer local-changes-revised-chan))
(setup-local->remote! local->remote-syncer)
(async/tap full-sync-mult private-full-sync-chan)
(async/tap stop-sync-mult private-stop-sync-chan)
(async/tap remote->local-sync-mult private-remote->local-sync-chan)
(async/tap remote->local-full-sync-mult private-remote->local-full-sync-chan)
(async/tap pause-resume-mult private-pause-resume-chan)
(go-loop []
(let [{:keys [stop remote->local remote->local-full-sync local->remote-full-sync local->remote resume pause]}
(let [{:keys [remote->local remote->local-full-sync local->remote-full-sync local->remote resume pause stop]}
(async/alt!
private-stop-sync-chan {:stop true}
private-remote->local-full-sync-chan {:remote->local-full-sync true}
private-remote->local-sync-chan {:remote->local true}
private-full-sync-chan {:local->remote-full-sync true}
private-pause-resume-chan ([v] (if v {:resume true} {:pause true}))
remote-change-chan ([v] (println "remote change:" v) {:remote->local v})
ratelimit-local-changes-chan ([v]
(let [rest-v (util/drain-chan ratelimit-local-changes-chan)
vs (cons v rest-v)]
(println "local changes:" vs)
{:local->remote vs}))
(if (nil? v)
{:stop true}
(let [rest-v (util/drain-chan ratelimit-local-changes-chan)
vs (cons v rest-v)]
(println "local changes:" vs)
{:local->remote vs})))
(timeout (* 20 60 1000)) {:local->remote-full-sync true}
:priority true)]
(cond
stop
(do (util/drain-chan ops-chan)
(>! ops-chan {:stop true}))
nil
remote->local-full-sync
(do (util/drain-chan ops-chan)
(>! ops-chan {:remote->local-full-sync true})
@ -2772,8 +2761,9 @@
:data {:graph-uuid graph-uuid
:epoch (tc/to-epoch (t/now))}})
(go-loop []
(let [{:keys [resume]} (<! ops-chan)]
(if resume
(let [{:keys [resume] :as result} (<! ops-chan)]
(cond
resume
(let [{:keys [remote->local remote->local-full-sync local->remote local->remote-full-sync] :as resume-state}
(get @*resume-state graph-uuid)]
(resume-state--reset graph-uuid)
@ -2795,6 +2785,11 @@
:resume-state resume-state
:epoch (tc/to-epoch (t/now))}})
(<! (.schedule this ::idle nil :resume)))
(nil? result)
(<! (.schedule this ::stop nil nil))
:else
(recur)))))
(idle [this]
@ -2802,7 +2797,7 @@
(let [{:keys [stop remote->local local->remote local->remote-full-sync remote->local-full-sync pause] :as result}
(<! ops-chan)]
(cond
stop
(or stop (nil? result))
(<! (.schedule this ::stop nil nil))
remote->local
(<! (.schedule this ::remote->local {:remote remote->local} {:remote-changed remote->local}))
@ -2816,10 +2811,11 @@
(<! (.schedule this ::pause nil nil))
:else
(do
(state/pub-event! [:instrument {:type :sync/wrong-ops-chan-when-idle
:payload {:ops-chan-result result
:user-id user-uuid
:graph-id graph-uuid}}])
(state/pub-event! [:capture-error {:error (js/Error. "sync/wrong-ops-chan-when-idle")
:payload {:type :sync/wrong-ops-chan-when-idle
:ops-chan-result result
:user-id user-uuid
:graph-id graph-uuid}}])
nil)))))
(full-sync [this]
@ -2849,11 +2845,11 @@
(.schedule this ::stop nil nil)
unknown
(do
(state/pub-event! [:instrument {:type :sync/unknown
:payload {:error unknown
:event :local->remote-full-sync-failed
:user-id user-uuid
:graph-uuid graph-uuid}}])
(state/pub-event! [:capture-error {:error unknown
:payload {:type :sync/unknown
:event :local->remote-full-sync-failed
:user-id user-uuid
:graph-uuid graph-uuid}}])
(put-sync-event! {:event :local->remote-full-sync-failed
:data {:graph-uuid graph-uuid
:epoch (tc/to-epoch (t/now))}})
@ -2877,18 +2873,18 @@
(.schedule this ::pause nil nil))
unknown
(do
(state/pub-event! [:instrument {:type :sync/unknown
:payload {:event :remote->local-full-sync-failed
:graph-uuid graph-uuid
:user-id user-uuid
:error unknown}}])
(state/pub-event! [:capture-error {:error unknown
:payload {:event :remote->local-full-sync-failed
:type :sync/unknown
:graph-uuid graph-uuid
:user-id user-uuid}}])
(put-sync-event! {:event :remote->local-full-sync-failed
:data {:graph-uuid graph-uuid
:exp unknown
:epoch (tc/to-epoch (t/now))}})
(let [next-state (if (string/includes? (str (ex-cause unknown)) "404 Not Found")
;; TODO: this should never happen
::pause
::stop
;; if any other exception occurred, re-exec remote->local-full-sync
::remote->local-full-sync)]
(.schedule this next-state nil nil)))))))
@ -2922,11 +2918,11 @@
(.schedule this ::pause nil nil))
unknown
(do (prn "remote->local err" unknown)
(state/pub-event! [:instrument {:type :sync/unknown
:payload {:event :remote->local
:user-id user-uuid
:graph-uuid graph-uuid
:error unknown}}])
(state/pub-event! [:capture-error {:error unknown
:payload {:type :sync/unknown
:event :remote->local
:user-id user-uuid
:graph-uuid graph-uuid}}])
(.schedule this ::idle nil nil)))))))
(local->remote [this {local-changes :local}]
@ -2986,11 +2982,11 @@
unknown
(do
(debug/pprint "local->remote" unknown)
(state/pub-event! [:instrument {:type :sync/unknown
:payload {:event :local->remote
:user-id user-uuid
:graph-uuid graph-uuid
:error unknown}}])
(state/pub-event! [:capture-error {:error unknown
:payload {:event :local->remote
:type :sync/unknown
:user-id user-uuid
:graph-uuid graph-uuid}}])
(.schedule this ::idle nil nil))))))
IStoppable
(-stop! [_]
@ -2998,9 +2994,7 @@
(when-not @*stopped?
(vreset! *stopped? true)
(ws-stop! *ws)
(offer! private-stop-sync-chan true)
(async/untap full-sync-mult private-full-sync-chan)
(async/untap stop-sync-mult private-stop-sync-chan)
(async/untap remote->local-sync-mult private-remote->local-sync-chan)
(async/untap remote->local-full-sync-mult private-remote->local-full-sync-chan)
(async/untap pause-resume-mult private-pause-resume-chan)
@ -3008,14 +3002,9 @@
(stop-local->remote! local->remote-syncer)
(stop-remote->local! remote->local-syncer)
(<! (<rsapi-cancel-all-requests))
(debug/pprint ["stop sync-manager, graph-uuid" graph-uuid "base-path" base-path])
(swap! *sync-state sync-state--update-state ::stop)
(loop []
(if (not= ::stop state)
(do
(<! (timeout 100))
(recur))
(reset! current-sm-graph-uuid nil))))))
(reset! current-sm-graph-uuid nil)
(debug/pprint ["stop sync-manager, graph-uuid" graph-uuid "base-path" base-path]))))
IStopped?
(-stopped? [_]
@ -3041,7 +3030,7 @@
(.set-local->remote-syncer! remote->local-syncer local->remote-syncer)
(swap! *sync-state sync-state--update-current-syncing-graph-uuid graph-uuid)
(->SyncManager user-uuid graph-uuid base-path *sync-state local->remote-syncer remote->local-syncer remoteapi-with-stop
nil *txid nil nil nil *stopped? *paused? nil (chan 1) (chan 1) (chan 1) (chan 1) (chan 1))))
nil *txid nil nil nil *stopped? *paused? nil (chan 1) (chan 1) (chan 1) (chan 1))))
(defn sync-manager-singleton
[user-uuid graph-uuid base-path repo txid *sync-state]
@ -3128,50 +3117,51 @@
(defn <sync-start
[]
(go
(when (false? @*sync-entered?)
(reset! *sync-entered? true)
(let [*sync-state (atom (sync-state))
current-user-uuid (<! (user/<user-uuid))
;; put @graph-uuid & get-current-repo together,
;; prevent to get older repo dir and current graph-uuid.
_ (<! (p->c (persist-var/-load graphs-txid)))
[user-uuid graph-uuid txid] @graphs-txid
txid (or txid 0)
repo (state/get-current-repo)]
(when-not (instance? ExceptionInfo current-user-uuid)
(when (and repo
@network-online-cursor
user-uuid graph-uuid txid
(graph-sync-off? graph-uuid)
(user/logged-in?)
(not (config/demo-graph? repo)))
(try
(when-let [sm (sync-manager-singleton current-user-uuid graph-uuid
(config/get-repo-dir repo) repo
txid *sync-state)]
(when (check-graph-belong-to-current-user current-user-uuid user-uuid)
(if-not (<! (<check-remote-graph-exists graph-uuid)) ; remote graph has been deleted
(clear-graphs-txid! repo)
(do
(state/set-file-sync-state graph-uuid @*sync-state)
(state/set-file-sync-manager graph-uuid sm)
(when-not (false? (state/enable-sync?))
(go
(when (false? @*sync-entered?)
(reset! *sync-entered? true)
(let [*sync-state (atom (sync-state))
current-user-uuid (<! (user/<user-uuid))
;; put @graph-uuid & get-current-repo together,
;; prevent to get older repo dir and current graph-uuid.
_ (<! (p->c (persist-var/-load graphs-txid)))
[user-uuid graph-uuid txid] @graphs-txid
txid (or txid 0)
repo (state/get-current-repo)]
(when-not (instance? ExceptionInfo current-user-uuid)
(when (and repo
@network-online-cursor
user-uuid graph-uuid txid
(graph-sync-off? graph-uuid)
(user/logged-in?)
(not (config/demo-graph? repo)))
(try
(when-let [sm (sync-manager-singleton current-user-uuid graph-uuid
(config/get-repo-dir repo) repo
txid *sync-state)]
(when (check-graph-belong-to-current-user current-user-uuid user-uuid)
(if-not (<! (<check-remote-graph-exists graph-uuid)) ; remote graph has been deleted
(clear-graphs-txid! repo)
(do
(state/set-file-sync-state graph-uuid @*sync-state)
(state/set-file-sync-manager graph-uuid sm)
;; update global state when *sync-state changes
(add-watch *sync-state ::update-global-state
(fn [_ _ _ n]
(state/set-file-sync-state graph-uuid n)))
;; update global state when *sync-state changes
(add-watch *sync-state ::update-global-state
(fn [_ _ _ n]
(state/set-file-sync-state graph-uuid n)))
(state/set-state! [:file-sync/graph-state :current-graph-uuid] graph-uuid)
(state/set-state! [:file-sync/graph-state :current-graph-uuid] graph-uuid)
(.start sm)
(.start sm)
(offer! remote->local-full-sync-chan true)
(offer! full-sync-chan true)))))
(catch :default e
(prn "Sync start error: ")
(log/error :exception e)))))
(reset! *sync-entered? false)))))
(offer! remote->local-full-sync-chan true)
(offer! full-sync-chan true)))))
(catch :default e
(prn "Sync start error: ")
(log/error :exception e)))))
(reset! *sync-entered? false))))))
(defn- restart-if-stopped!
[is-active?]

View File

@ -73,11 +73,7 @@
(defn- instrument!
[]
(let [total (srs/get-srs-cards-total)]
(state/set-state! :srs/cards-due-count total)
(state/pub-event! [:instrument {:type :flashcards/count
:payload {:total (or total 0)}}])
(state/pub-event! [:instrument {:type :blocks/count
:payload {:total (db/blocks-count)}}])))
(state/set-state! :srs/cards-due-count total)))
(defn restore-and-setup!
[repos]

View File

@ -19,20 +19,20 @@
(when (not= file current-file)
current-file))))
(defn- get-delete-blocks [repo-url first-page file]
(let [delete-blocks (->
(concat
(db/delete-file-blocks! repo-url file)
(when first-page (db/delete-page-blocks repo-url (:block/name first-page))))
(distinct))]
(when-let [current-file (page-exists-in-another-file repo-url first-page file)]
(when (not= file current-file)
(let [error (str "Page already exists with another file: " current-file ", current file: " file ". Please keep only one of them and re-index your graph.")]
(state/pub-event! [:notification/show
{:content error
:status :error
:clear? false}]))))
delete-blocks))
(defn- validate-existing-file
[repo-url file-page file-path]
(when-let [current-file (page-exists-in-another-file repo-url file-page file-path)]
(when (not= file-path current-file)
(let [error (str "Page already exists with another file: " current-file ", current file: " file-path ". Please keep only one of them and re-index your graph.")]
(state/pub-event! [:notification/show
{:content error
:status :error
:clear? false}])))))
(defn- validate-and-get-blocks-to-delete
[repo-url db file-page file-path retain-uuid-blocks]
(validate-existing-file repo-url file-page file-path)
(graph-parser/get-blocks-to-delete db file-page file-path retain-uuid-blocks))
(defn reset-file!
"Main fn for updating a db with the results of a parsed file"
@ -62,7 +62,7 @@
new? (nil? (db/entity [:file/path file]))
options (merge (dissoc options :verbose)
{:new? new?
:delete-blocks-fn (partial get-delete-blocks repo-url)
:delete-blocks-fn (partial validate-and-get-blocks-to-delete repo-url)
:extract-options (merge
{:user-config (state/get-config)
:date-formatter (state/get-date-formatter)

View File

@ -15,10 +15,10 @@
(set-config! repo :file/name-format format))
(defn- calc-current-name
"If the file body is parsed as the same page name, but the page name has a
different file sanitization result under the current sanitization form, return
"If the file body is parsed as the same page name, but the page name has a
different file sanitization result under the current sanitization form, return
the new file name.
Return:
Return:
the file name for the page name under the current file naming rules, or `nil`
if no change of path happens"
[format file-body prop-title]
@ -33,7 +33,7 @@
(defn- calc-previous-name
"We want to recover user's title back under new file name sanity rules.
Return:
Return:
the file name for that page name under the current file naming rules,
and the new title if no action applied, or `nil` if no break change happens"
[old-format new-format file-body]
@ -72,7 +72,7 @@
[old-format new-format file-body prop-title]
;; dont rename journal page. officially it's stored as `yyyy_mm_dd`
;; If it's a journal file imported with custom :journal/page-title-format,
;; and it includes reserved characters, format config change / file renaming is required.
;; and it includes reserved characters, format config change / file renaming is required.
;; It's about user's own data management decision and should be handled
;; by user manually.
;; the 'expected' title of the user when updating from the previous format, or title will be broken in new format
@ -88,7 +88,7 @@
ret)))
(defn calc-rename-target
"Return the renaming status and new file body to recover the original title of the file in previous version.
"Return the renaming status and new file body to recover the original title of the file in previous version.
The return title should be the same as the title in the index file in the previous version.
return nil if no rename is needed.
page: the page entity
@ -96,6 +96,7 @@
old-format, new-format: the filename formats
Return:
{:status :informal | :breaking | :unreachable
:file-name original file name
:target the new file name
:old-title the old title
:changed-title the new title} | nil"
@ -113,6 +114,7 @@
manual-prop-title?
(fs-util/include-reserved-chars? file-body))
{:status :informal
:file-name file-body
:target (fs-util/file-name-sanity file-body new-format)
:old-title prop-title
:changed-title prop-title})))

View File

@ -17,13 +17,11 @@
[frontend.handler.block :as block-handler]
[frontend.handler.common :as common-handler]
[frontend.handler.export :as export]
[frontend.handler.image :as image-handler]
[frontend.handler.notification :as notification]
[frontend.handler.repeated :as repeated]
[frontend.handler.route :as route-handler]
[frontend.handler.assets :as assets-handler]
[frontend.idb :as idb]
[frontend.image :as image]
[frontend.mobile.util :as mobile-util]
[frontend.modules.outliner.core :as outliner-core]
[frontend.modules.outliner.transaction :as outliner-tx]
@ -1322,9 +1320,10 @@
(defn get-asset-file-link
[format url file-name image?]
(let [pdf? (and url (string/ends-with? (string/lower-case url) ".pdf"))]
(let [pdf? (and url (string/ends-with? (string/lower-case url) ".pdf"))
video? (and url (util/ext-of-video? url))]
(case (keyword format)
:markdown (util/format (str (when (or image? pdf?) "!") "[%s](%s)") file-name url)
:markdown (util/format (str (when (or image? video? pdf?) "!") "[%s](%s)") file-name url)
:org (if image?
(util/format "[[%s]]" url)
(util/format "[[%s][%s]]" url file-name))
@ -1464,7 +1463,7 @@
[id ^js files format uploading? drop-or-paste?]
(let [repo (state/get-current-repo)
block (state/get-edit-block)]
(if (config/local-db? repo)
(when (config/local-db? repo)
(-> (save-assets! block repo (js->clj files))
(p/then
(fn [res]
@ -1487,28 +1486,7 @@
(fn []
(reset! uploading? false)
(reset! *asset-uploading? false)
(reset! *asset-uploading-process 0))))
(image/upload
files
(fn [file file-name file-type]
(image-handler/request-presigned-url
file file-name file-type
uploading?
(fn [signed-url]
(insert-command! id
(get-asset-file-link format signed-url file-name true)
format
{:last-pattern (if drop-or-paste? "" (state/get-editor-command-trigger))
:restore? true})
(reset! *asset-uploading? false)
(reset! *asset-uploading-process 0))
(fn [e]
(let [process (* (/ (gobj/get e "loaded")
(gobj/get e "total"))
100)]
(reset! *asset-uploading? false)
(reset! *asset-uploading-process process)))))))))
(reset! *asset-uploading-process 0)))))))
;; Editor should track some useful information, like editor modes.
;; For example:

View File

@ -55,12 +55,14 @@
[frontend.components.file-sync :as file-sync]
[frontend.components.encryption :as encryption]
[frontend.components.conversion :as conversion-component]
[frontend.components.whiteboard :as whiteboard]
[goog.dom :as gdom]
[logseq.db.schema :as db-schema]
[promesa.core :as p]
[rum.core :as rum]
[logseq.graph-parser.config :as gp-config]
[frontend.components.whiteboard :as whiteboard]))
[cljs-bean.core :as bean]
["@sentry/react" :as Sentry]))
;; TODO: should we move all events here?
@ -75,7 +77,7 @@
(async/go (async/<! (p->c (persist-var/load-vars)))
(async/<! (sync/<sync-stop))))
(defmethod handle :user/login [[_]]
(defmethod handle :user/fetch-info-and-graphs [[_]]
(state/set-state! [:ui/loading? :login] false)
(async/go
(let [result (async/<! (sync/<user-info sync/remoteapi))]
@ -87,7 +89,8 @@
(state/set-state! :user/info result)
(let [status (if (user-handler/alpha-or-beta-user?) :welcome :unavailable)]
(when (and (= status :welcome) (user-handler/logged-in?))
(file-sync-handler/set-sync-enabled! true)
(when-not (false? (state/enable-sync?)) ; user turns it off
(file-sync-handler/set-sync-enabled! true))
(async/<! (file-sync-handler/load-session-graphs))
(p/let [repos (repo-handler/refresh-repos!)]
(when-let [repo (state/get-current-repo)]
@ -352,7 +355,8 @@
(state/set-modal! #(git-component/file-specific-version path hash content)))
;; Hook on a graph is ready to be shown to the user.
;; It's different from :graph/resotred, as :graph/restored is for window reloaded
;; It's different from :graph/restored, as :graph/restored is for window reloaded
;; FIXME: config may not be loaded when the graph is ready.
(defmethod handle :graph/ready
[[_ repo]]
(when (config/local-db? repo)
@ -362,10 +366,15 @@
(state/pub-event! [:graph/dir-gone dir]))))
;; FIXME: an ugly implementation for redirecting to page on new window is restored
(repo-handler/graph-ready! repo)
(when (and (util/electron?)
(not (config/demo-graph?))
(= :legacy (state/get-filename-format)))
(state/pub-event! [:ui/notify-outdated-filename-format []])))
(when-not config/test?
(js/setTimeout
(fn []
(let [filename-format (state/get-filename-format repo)]
(when (and (util/electron?)
(not (config/demo-graph?))
(not= filename-format :triple-lowbar))
(state/pub-event! [:ui/notify-outdated-filename-format []]))))
3000)))
(defmethod handle :notification/show [[_ {:keys [content status clear?]}]]
(notification/show! content status clear?))
@ -410,6 +419,10 @@
(js/console.error "instrument data-map should only contains [:type :payload]"))
(posthog/capture type payload))
(defmethod handle :capture-error [[_ {:keys [error payload]}]]
(Sentry/captureException error
(bean/->js {:extra payload})))
(defmethod handle :exec-plugin-cmd [[_ {:keys [pid cmd action]}]]
(commands/exec-plugin-simple-command! pid cmd action))
@ -456,7 +469,9 @@
(when-let [left-sidebar-node (gdom/getElement "left-sidebar")]
(set! (.. left-sidebar-node -style -bottom) "0px"))
(when-let [right-sidebar-node (gdom/getElementByClass "sidebar-item-list")]
(set! (.. right-sidebar-node -style -paddingBottom) "150px")))))
(set! (.. right-sidebar-node -style -paddingBottom) "150px"))
(when-let [toolbar (.querySelector main-node "#mobile-editor-toolbar")]
(set! (.. toolbar -style -bottom) 0)))))
(defn update-file-path [deprecated-repo current-repo deprecated-app-id current-app-id]
(let [files (db-model/get-files-entity deprecated-repo)
@ -764,13 +779,13 @@
"We suggest you upgrade now to avoid potential bugs."]
(when (seq paths)
[:p
"For example, the files below have reserved characters that can't be synced on some platforms."])]
]
"For example, the files below have reserved characters that can't be synced on some platforms."])]]
(ui/button
"Update filename format"
:on-click (fn []
(notification/clear-all!)
(state/set-modal!
"Update filename format"
:aria-label "Update filename format"
:on-click (fn []
(notification/clear-all!)
(state/set-modal!
(fn [_] (conversion-component/files-breaking-changed))
{:id :filename-format-panel :center? true})))
(when (seq paths)
@ -868,8 +883,8 @@
:else
(do
(state/pub-event! [:instrument {:type :file/parse-and-load-error
:payload error}])
(state/pub-event! [:capture-error {:error error
:payload {:type :file/parse-and-load-error}}])
[:li.my-1 {:key file}
[:a {:on-click #(js/window.apis.openPath file)} file]
[:p (.-message error)]]))))]
@ -886,9 +901,9 @@
(catch :default error
(let [type :handle-system-events/failed]
(js/console.error (str type) (clj->js payload) "\n" error)
(state/pub-event! [:instrument {:type type
:payload payload
:error error}])))))
(state/pub-event! [:capture-error {:error error
:payload {:type type
:payload payload}}])))))
(recur))
chan))

View File

@ -149,11 +149,9 @@
(println "Write file failed, path: " path ", content: " content)
(log/error :write/failed error)
(state/pub-event! [:instrument {:type :write-file/failed-for-alter-file
:payload {:path path
:content-length (count content)
:error-str (str error)
:error error}}])))
(state/pub-event! [:capture-error
{:error error
:payload {:type :write-file/failed-for-alter-file}}])))
result))
(defn set-file-content!
@ -178,11 +176,9 @@
(str error))
:status :error
:clear? false}])
(state/pub-event! [:instrument {:type :write-file/failed
:payload {:path path
:content-length (count content)
:error-str (str error)
:error error}}])
(state/pub-event! [:capture-error
{:error error
:payload {:type :write-file/failed}}])
(log/error :write-file/failed {:path path
:content content
:error error})))))))

View File

@ -2,7 +2,6 @@
(:require [clojure.string :as string]
[frontend.config :as config]
[frontend.fs :as fs]
[frontend.handler.notification :as notification]
[frontend.image :as image]
[frontend.state :as state]
[frontend.util :as util]
@ -51,46 +50,3 @@
(js/console.dir error))))))
(catch :default _e
nil))))
(defn request-presigned-url
[file filename mime-type uploading? url-handler on-processing]
(cond
(> (gobj/get file "size") (* 12 1024 1024))
(notification/show! [:p "Sorry, we don't support any file that's larger than 12MB."] :error)
:else
(do
(reset! uploading? true)
;; start uploading?
(util/post (str config/api "presigned_url")
{:filename filename
:mime-type mime-type}
(fn [{:keys [presigned-url s3-object-key] :as resp}]
(if presigned-url
(util/upload presigned-url
file
(fn [_result]
;; request cdn signed url
(util/post (str config/api "signed_url")
{:s3-object-key s3-object-key}
(fn [{:keys [signed-url]}]
(reset! uploading? false)
(if signed-url
(url-handler signed-url)
(prn "Something error, can't get a valid signed url.")))
(fn [_error]
(reset! uploading? false)
(prn "Something error, can't get a valid signed url."))))
(fn [error]
(reset! uploading? false)
(prn "upload failed.")
(js/console.dir error))
(fn [e]
(on-processing e)))
;; TODO: notification, or re-try
(do
(reset! uploading? false)
(prn "failed to get any presigned url, resp: " resp))))
(fn [_error]
;; (prn "Get token failed, error: " error)
(reset! uploading? false))))))

View File

@ -424,9 +424,9 @@
(on-success)))
(p/catch (fn [error]
(js/console.error error)
(state/pub-event! [:instrument {:type :db/persist-failed
:payload {:error-str (str error)
:error error}}])
(state/pub-event! [:capture-error
{:error error
:payload {:type :db/persist-failed}}])
(when on-error
(on-error)))))))

View File

@ -44,12 +44,14 @@
(:db/id (db/entity repo [:block/name (util/page-name-sanity-lc page-db-id)]))
page-db-id)
opts (if page-db-id (assoc opts :page (str page-db-id)) opts)]
(p/let [blocks (search/block-search repo q opts)]
(p/let [blocks (search/block-search repo q opts)
pages-content (search/page-content-search repo q opts)]
(let [result (merge
{:blocks blocks
:has-more? (= limit (count blocks))}
(when-not page-db-id
{:pages (search/page-search q)
{:pages-content pages-content
:pages (search/page-search q)
:files (search/file-search q)}))
search-key (if more? :search/more-result :search/result)]
(swap! state/state assoc search-key result)

View File

@ -133,24 +133,15 @@
(set-tokens! (:id_token (:body resp)) (:access_token (:body resp)))))))))
(defn restore-tokens-from-localstorage
"Restore id-token, access-token, refresh-token from localstorage,
and refresh id-token&access-token if necessary.
return nil when tokens are not available."
"Refresh id-token&access-token, pull latest repos, returns nil when tokens are not available."
[]
(println "restore-tokens-from-localstorage")
(let [id-token (js/localStorage.getItem "id-token")
access-token (js/localStorage.getItem "access-token")
refresh-token (js/localStorage.getItem "refresh-token")]
(let [refresh-token (js/localStorage.getItem "refresh-token")]
(when refresh-token
(set-tokens! id-token access-token refresh-token)
(when-not (or (nil? id-token) (nil? access-token)
(-> id-token parse-jwt almost-expired?)
(-> access-token parse-jwt almost-expired?))
(go
;; id-token or access-token expired
(<! (<refresh-id-token&access-token))
;; refresh remote graph list by pub login event
(when (user-uuid) (state/pub-event! [:user/login])))))))
(go
(<! (<refresh-id-token&access-token))
;; refresh remote graph list by pub login event
(when (user-uuid) (state/pub-event! [:user/fetch-info-and-graphs]))))))
(defn login-callback [code]
(state/set-state! [:ui/loading? :login] true)
@ -161,7 +152,7 @@
(-> resp
:body
(as-> $ (set-tokens! (:id_token $) (:access_token $) (:refresh_token $)))
(#(state/pub-event! [:user/login])))
(#(state/pub-event! [:user/fetch-info-and-graphs])))
(debug/pprint "login-callback" resp)))))
(defn logout []

View File

@ -1,8 +1,6 @@
(ns frontend.image
"Image related utility fns"
(:require ["/frontend/exif" :as exif]
[clojure.string :as string]
[frontend.date :as date]
[goog.object :as gobj]))
(defn reverse?
@ -70,19 +68,3 @@
(.createObjectURL (or (.-URL js/window)
(.-webkitURL js/window))
file))
;; (defn build-image
;; []
;; (let [img (js/Image.)]
;; ))
(defn upload
[files file-handler & {:keys [files-limit]
:or {files-limit 1}}]
(doseq [file (take files-limit (array-seq files))]
(let [file-type (gobj/get file "type")
ymd (->> (vals (date/year-month-day-padded))
(string/join "_"))
file-name (str ymd "_" (gobj/get file "name"))]
(when (= 0 (.indexOf file-type "image/"))
(file-handler file file-name file-type)))))

View File

@ -13,6 +13,8 @@
nil)))
(defn get-entity-from-db-after-or-before
"Get the entity from db after if possible; otherwise get entity from db before
Useful for fetching deleted elements"
[db-before db-after db-id]
(let [r (safe-pull db-after '[*] db-id)]
(if (= keys-of-deleted-entity (count r))
@ -21,6 +23,7 @@
r)))
(defn get-blocks-and-pages
"Calculate updated blocks and pages based on the db-before and db-after from tx-report"
[{:keys [db-before db-after tx-data tx-meta]}]
(let [updated-db-ids (-> (mapv first tx-data) (set))
result (reduce
@ -39,6 +42,7 @@
{:blocks #{}
:pages #{}}
updated-db-ids)
;; updated pages logged in tx-meta (usually from move op)
tx-meta-pages (->> [(:from-page tx-meta) (:target-page tx-meta)]
(remove nil?)
(map #(get-entity-from-db-after-or-before db-before db-after %))

View File

@ -3,8 +3,6 @@
[frontend.util :as util]
[frontend.config :as config]
["@sentry/react" :as Sentry]
["@sentry/tracing" :refer [BrowserTracing]]
["posthog-js" :as posthog]
[frontend.mobile.util :as mobile-util]))
(def config
@ -21,8 +19,8 @@
(mobile-util/native-platform?) "mobile"
:else "web")
:publishing config/publishing?}}
:integrations [(new posthog/SentryIntegration posthog "logseq" 5311485)
(new BrowserTracing)]
;; :integrations [(new posthog/SentryIntegration posthog "logseq" 5311485)
;; (new BrowserTracing)]
:debug config/dev?
:tracesSampleRate 1.0
:beforeSend (fn [^js event]

View File

@ -511,11 +511,12 @@
tx (insert-blocks-aux blocks' target-block' insert-opts)]
(if (some (fn [b] (or (nil? (:block/parent b)) (nil? (:block/left b)))) tx)
(do
(state/pub-event! [:instrument {:type :outliner/invalid-structure
:payload {:blocks blocks
:target-block target-block'
:opt opts
:data (mapv #(dissoc % :block/content) tx)}}])
(state/pub-event! [:capture-error {:error "Outliner invalid structure"
:payload {:type :outliner/invalid-structure
:blocks blocks
:target-block target-block'
:opt opts
:data (mapv #(dissoc % :block/content) tx)}}])
(throw (ex-info "Invalid outliner data"
{:opts insert-opts
:tx (vec tx)

View File

@ -14,7 +14,9 @@
[frontend.util :as util]
[frontend.util.property :as property]
[goog.object :as gobj]
[promesa.core :as p]))
[promesa.core :as p]
[clojure.set :as set]
[frontend.modules.datascript-report.core :as db-report]))
(defn get-engine
[repo]
@ -95,11 +97,24 @@
(when-not (string/blank? q)
(protocol/query engine q option)))))
(defn page-content-search
[repo q option]
(when-let [engine (get-engine repo)]
(let [q (util/search-normalize q (state/enable-search-remove-accents?))
q (if (util/electron?) q (escape-str q))]
(when-not (string/blank? q)
(protocol/query-page engine q option)))))
(defn- transact-blocks!
[repo data]
(when-let [engine (get-engine repo)]
(protocol/transact-blocks! engine data)))
(defn- transact-pages!
[repo data]
(when-let [engine (get-engine repo)]
(protocol/transact-pages! engine data)))
(defn exact-matched?
"Check if two strings points toward same search result"
[q match]
@ -124,7 +139,7 @@
q (clean-str q)]
(when-not (string/blank? q)
(let [indice (or (get-in @indices [repo :pages])
(search-db/make-pages-indice!))
(search-db/make-pages-title-indice!))
result (->> (.search indice q (clj->js {:limit limit}))
(bean/->clj))]
;; TODO: add indexes for highlights
@ -191,8 +206,48 @@
(let [result (fuzzy-search result q :limit limit)]
(vec result))))))))
(defn sync-search-indice!
[repo tx-report]
(defn- get-pages-from-datoms-impl
[pages]
(let [pages-result (db/pull-many '[:db/id :block/name :block/original-name] (set (map :e pages)))
pages-to-add-set (->> (filter :added pages)
(map :e)
(set))
pages-to-add (->> (filter (fn [page]
(contains? pages-to-add-set (:db/id page))) pages-result)
(map (fn [p] (or (:block/original-name p)
(:block/name p))))
(map search-db/original-page-name->index))
pages-to-remove-set (->> (remove :added pages)
(map :v))
pages-to-remove-id-set (->> (remove :added pages)
(map :e)
set)]
{:pages-to-add pages-to-add
:pages-to-remove-set pages-to-remove-set
:pages-to-add-id-set pages-to-add-set
:pages-to-remove-id-set pages-to-remove-id-set}))
(defn- get-blocks-from-datoms-impl
[blocks]
(when (seq blocks)
(let [blocks-result (->> (db/pull-many '[:db/id :block/uuid :block/format :block/content :block/page] (set (map :e blocks)))
(map (fn [b] (assoc b :block/page (get-in b [:block/page :db/id])))))
blocks-to-add-set (->> (filter :added blocks)
(map :e)
(set))
blocks-to-add (->> (filter (fn [block]
(contains? blocks-to-add-set (:db/id block)))
blocks-result)
(map search-db/block->index)
(remove nil?))
blocks-to-remove-set (->> (remove :added blocks)
(map :e)
(set))]
{:blocks-to-remove-set blocks-to-remove-set
:blocks-to-add blocks-to-add})))
(defn- get-direct-blocks-and-pages
[tx-report]
(let [data (:tx-data tx-report)
datoms (filter
(fn [datom]
@ -200,50 +255,78 @@
data)]
(when (seq datoms)
(let [datoms (group-by :a datoms)
pages (:block/name datoms)
blocks (:block/content datoms)]
(when (seq pages)
(let [pages-result (db/pull-many '[:db/id :block/name :block/original-name] (set (map :e pages)))
pages-to-add-set (->> (filter :added pages)
(map :e)
(set))
pages-to-add (->> (filter (fn [page]
(contains? pages-to-add-set (:db/id page))) pages-result)
(map (fn [p] (or (:block/original-name p)
(:block/name p))))
(map search-db/original-page-name->index))
pages-to-remove-set (->> (remove :added pages)
(map :v))]
(swap! search-db/indices update-in [repo :pages]
(fn [indice]
(when indice
(doseq [page-name pages-to-remove-set]
(.remove indice
(fn [page]
(= (util/safe-page-name-sanity-lc page-name)
(util/safe-page-name-sanity-lc (gobj/get page "original-name"))))))
(when (seq pages-to-add)
(doseq [page pages-to-add]
(.add indice (bean/->js page)))))
indice))))
blocks (:block/content datoms)
pages (:block/name datoms)]
(merge (get-blocks-from-datoms-impl blocks)
(get-pages-from-datoms-impl pages))))))
(when (seq blocks)
(let [blocks-result (->> (db/pull-many '[:db/id :block/uuid :block/format :block/content :block/page] (set (map :e blocks)))
(map (fn [b] (assoc b :block/page (get-in b [:block/page :db/id])))))
blocks-to-add-set (->> (filter :added blocks)
(map :e)
(set))
blocks-to-add (->> (filter (fn [block]
(contains? blocks-to-add-set (:db/id block)))
blocks-result)
(map search-db/block->index)
(remove nil?))
blocks-to-remove-set (->> (remove :added blocks)
(map :e)
(set))]
(transact-blocks! repo
{:blocks-to-remove-set blocks-to-remove-set
:blocks-to-add blocks-to-add})))))))
(defn- get-indirect-pages
"Return the set of pages that will have content updated"
[tx-report]
(let [data (:tx-data tx-report)
datoms (filter
(fn [datom]
(and (:added datom)
(contains? #{:file/content} (:a datom))))
data)]
(when (seq datoms)
(->> datoms
(mapv (fn [datom]
(let [tar-db (:db-after tx-report)]
;; Reverse query the corresponding page id of the modified `:file/content`)
(when-let [page-id (->> (:e datom)
(db-report/safe-pull tar-db '[:block/_file])
(:block/_file)
(first)
(:db/id))]
;; Fetch page entity according to what page->index requested
(db-report/safe-pull tar-db '[:db/id :block/uuid
:block/original-name
{:block/file [:file/content]}]
page-id)))))
(remove nil?)))))
;; TODO merge with logic in `invoke-hooks` when feature and test is sufficient
(defn sync-search-indice!
[repo tx-report]
(let [{:keys [pages-to-add pages-to-remove-set pages-to-remove-id-set
blocks-to-add blocks-to-remove-set]} (get-direct-blocks-and-pages tx-report) ;; directly modified block & pages
updated-pages (get-indirect-pages tx-report)]
;; update page title indice
(when (or (seq pages-to-add) (seq pages-to-remove-set))
(swap! search-db/indices update-in [repo :pages]
(fn [indice]
(when indice
(doseq [page-name pages-to-remove-set]
(.remove indice
(fn [page]
(= (util/safe-page-name-sanity-lc page-name)
(util/safe-page-name-sanity-lc (gobj/get page "original-name"))))))
(when (seq pages-to-add)
(doseq [page pages-to-add]
(.add indice (bean/->js page)))))
indice)))
;; update block indice
(when (or (seq blocks-to-add) (seq blocks-to-remove-set))
(transact-blocks! repo
{:blocks-to-remove-set blocks-to-remove-set
:blocks-to-add blocks-to-add}))
;; update page indice
(when (or (seq pages-to-remove-id-set) (seq updated-pages)) ;; when move op happens, no :block/content provided
(let [indice-pages (map search-db/page->index updated-pages)
invalid-set (->> (map (fn [updated indiced] ;; get id of pages without valid page index
(if indiced nil (:db/id updated)))
updated-pages indice-pages)
(remove nil?)
set)
pages-to-add (->> indice-pages
(remove nil?)
set)
pages-to-remove-set (set/union pages-to-remove-id-set invalid-set)]
(transact-pages! repo {:pages-to-remove-set pages-to-remove-set
:pages-to-add pages-to-add})))))
(defn rebuild-indices!
([]
@ -251,10 +334,10 @@
([repo]
(when repo
(when-let [engine (get-engine repo)]
(let [pages (search-db/make-pages-indice!)]
(let [page-titles (search-db/make-pages-title-indice!)]
(p/let [blocks (protocol/rebuild-blocks-indice! engine)]
(let [result {:pages pages
:blocks blocks}]
(let [result {:pages page-titles ;; TODO: rename key to :page-titles
:blocks blocks}]
(swap! indices assoc repo result)
indices)))))))

View File

@ -31,6 +31,13 @@
(protocol/query e q opts))
(protocol/query e1 q opts)))
(query-page [_this q opts]
(println "D:Search > Query-page contents:" repo q opts)
(let [[e1 e2] (get-registered-engines repo)]
(doseq [e e2]
(protocol/query-page e q opts))
(protocol/query-page e1 q opts)))
(rebuild-blocks-indice! [_this]
(println "D:Search > Initial blocks indice!:" repo)
(let [[e1 e2] (get-registered-engines repo)]
@ -43,6 +50,11 @@
(doseq [e (get-flatten-registered-engines repo)]
(protocol/transact-blocks! e data)))
(transact-pages! [_this data]
(println "D:Search > Transact pages!:" repo)
(doseq [e (get-flatten-registered-engines repo)]
(protocol/transact-pages! e data)))
(truncate-blocks! [_this]
(println "D:Search > Truncate blocks!" repo)
(doseq [e (get-flatten-registered-engines repo)]

View File

@ -35,6 +35,7 @@
protocol/Engine
(query [_this q option]
(p/promise (search-blocks repo q option)))
(query-page [_this _q _opt] nil) ;; Page index is not available with fuse.js until sufficient performance benchmarking
(rebuild-blocks-indice! [_this]
(let [indice (search-db/make-blocks-indice! repo)]
(p/promise indice)))
@ -51,6 +52,7 @@
(doseq [block blocks-to-add]
(.add indice (bean/->js block)))))
indice)))
(transact-pages! [_this _data] nil) ;; Page index is not available with fuse.js until sufficient performance benchmarking
(truncate-blocks! [_this]
(swap! indices assoc-in [repo :blocks] nil))
(remove-db! [_this]

View File

@ -10,15 +10,33 @@
(defonce indices (atom nil))
(defn- sanitize
[content]
(util/search-normalize content (state/enable-search-remove-accents?)))
(defn- max-len
[]
(state/block-content-max-length (state/get-current-repo)))
(defn block->index
"Convert a block to the index for searching"
[{:block/keys [uuid page content] :as block}]
(when-let [content (util/search-normalize content (state/enable-search-remove-accents?))]
(when-not (> (count content) (state/block-content-max-length (state/get-current-repo)))
{:id (:db/id block)
(when-not (> (count content) (max-len))
{:id (:db/id block)
:uuid (str uuid)
:page page
:content (sanitize content)}))
(defn page->index
"Convert a page name to the index for searching (page content level)
Generate index based on the DB content AT THE POINT OF TIME"
[{:block/keys [uuid _original-name] :as page}]
(when-let [content (some-> (:block/file page)
(:file/content))]
(when-not (> (count content) (* (max-len) 10))
{:id (:db/id page)
:uuid (str uuid)
:page page
:content content})))
:content (sanitize content)})))
(defn build-blocks-indice
;; TODO: Remove repo effects fns further up the call stack. db fns need standardization on taking connection
@ -29,6 +47,14 @@
(remove nil?)
(bean/->js)))
(defn build-pages-indice
[repo]
(->> (db/get-all-pages repo)
(map #(db/entity (:db/id %))) ;; get full file-content
(map page->index)
(remove nil?)
(bean/->js)))
(defn make-blocks-indice!
[repo]
(let [blocks (build-blocks-indice repo)
@ -46,9 +72,11 @@
[p] {:name (util/search-normalize p (state/enable-search-remove-accents?))
:original-name p})
(defn make-pages-indice!
"Build a page indice from scratch.
Incremental page indice is implemented in frontend.search.sync-search-indice!"
(defn make-pages-title-indice!
"Build a page title indice from scratch.
Incremental page title indice is implemented in frontend.search.sync-search-indice!
Rename from the page indice since 10.25.2022, since this is only used for page title search.
From now on, page indice is talking about page content search."
[]
(when-let [repo (state/get-current-repo)]
(let [pages (->> (db/get-pages (state/get-current-repo))

View File

@ -17,12 +17,22 @@
{:block/uuid uuid
:block/content content
:block/page page})) result)))
(query-page [_this q opts]
(p/let [result (ipc/ipc "search-pages" repo q opts)
result (bean/->clj result)]
(keep (fn [{:keys [content snippet uuid]}]
(when-not (> (count content) (* 10 (state/block-content-max-length repo)))
{:block/uuid uuid
:block/snippet snippet})) result)))
(rebuild-blocks-indice! [_this]
(let [indice (search-db/build-blocks-indice repo)]
(ipc/ipc "rebuild-blocks-indice" repo indice)))
(let [blocks-indice (search-db/build-blocks-indice repo)
pages-indice (search-db/build-pages-indice repo)]
(ipc/ipc "rebuild-indice" repo blocks-indice pages-indice)))
(transact-blocks! [_this data]
(ipc/ipc "transact-blocks" repo (bean/->js data)))
(truncate-blocks! [_this]
(ipc/ipc "truncate-blocks" repo))
(ipc/ipc "truncate-indice" repo))
(transact-pages! [_this data]
(ipc/ipc "transact-pages" repo (bean/->js data)))
(remove-db! [_this]
(ipc/ipc "remove-db" repo)))

View File

@ -23,6 +23,9 @@
(query [_this q opts]
(call-service! service "search:query" (merge {:q q} opts) true))
(query-page [_this q opts]
(call-service! service "search:queryPage" (merge {:q q} opts) true))
(rebuild-blocks-indice! [_this]
;; Not pushing all data for performance temporarily
;;(let [blocks (search-db/build-blocks-indice repo)])
@ -34,6 +37,12 @@
{:data {:added blocks-to-add
:removed blocks-to-remove-set}})))
(transact-pages! [_this data]
(let [{:keys [pages-to-remove-set pages-to-add]} data]
(call-service! service "search:transactpages"
{:data {:added pages-to-add
:removed pages-to-remove-set}})))
(truncate-blocks! [_this]
(call-service! service "search:truncateBlocks" {}))

View File

@ -1,8 +1,10 @@
(ns ^:no-doc frontend.search.protocol)
(defprotocol Engine
(query [this q option])
(rebuild-blocks-indice! [this])
(query [this q option])
(query-page [this q option])
(rebuild-blocks-indice! [this]) ;; TODO: rename to rebuild-indice!
(transact-blocks! [this data])
(truncate-blocks! [this])
(truncate-blocks! [this]) ;; TODO: rename to truncate-indice!
(transact-pages! [this data])
(remove-db! [this]))

View File

@ -51,7 +51,7 @@
:journals-length 3
:search/q ""
:search/mode :global
:search/mode :global ;; inner page or full graph? {:page :global}
:search/result nil
:search/graph-filters []
:search/engines {}
@ -69,7 +69,7 @@
;; ui
:ui/viewport {}
;; left sidebar
:ui/navigation-item-collapsed? {}
@ -310,6 +310,9 @@
:default-arweave-gateway "https://arweave.net"
;; For flushing the settings of old versions. Don't bump this value.
;; There are only two kinds of graph, one is not upgraded (:legacy) and one is upgraded (:triple-lowbar)
;; For not upgraded graphs, the config will have no key `:file/name-format`
;; Then the default value is applied
:file/name-format :legacy})
;; State that most user config is dependent on

View File

@ -250,10 +250,11 @@
content]]
[:div.ml-4.flex-shrink-0.flex
[:button.inline-flex.text-gray-400.focus:outline-none.focus:text-gray-500.transition.ease-in-out.duration-150.notification-close-button
{:on-click (fn []
{:aria-label "Close"
:on-click (fn []
(notification/clear! uid))}
(icon "x" {:fill "currentColor"})]]]]]]])))
(icon "x" {:fill "currentColor"})]]]]]]])))
(declare button)

View File

@ -11,6 +11,7 @@
["remove-accents" :as removeAccents]
["sanitize-filename" :as sanitizeFilename]
["check-password-strength" :refer [passwordStrength]]
["path-complete-extname" :as pathCompleteExtname]
[frontend.loader :refer [load]]
[cljs-bean.core :as bean]
[cljs-time.coerce :as tc]
@ -43,7 +44,7 @@
(-write writer (str "\"" (.toString sym) "\"")))))
#?(:cljs (defonce ^js node-path utils/nodePath))
#?(:cljs (defonce ^js full-path-extname utils/fullPathExtname))
#?(:cljs (defonce ^js full-path-extname pathCompleteExtname))
#?(:cljs (defn app-scroll-container-node
([]
(gdom/getElement "main-content-container"))
@ -200,6 +201,11 @@
(string/ends-with? %))
[".png" ".jpg" ".jpeg" ".bmp" ".gif" ".webp" ".svg"]))
(defn ext-of-video?
  "Truthy when the string `s` ends with a known video file extension.
   The comparison is case-insensitive; returns nil for non-matches."
  [s]
  (let [lower-s (string/lower-case s)]
    (some (partial string/ends-with? lower-s)
          [".mp4" ".mkv" ".mov" ".wmv" ".avi" ".webm" ".mpg" ".ts" ".ogg" ".flv"])))
;; ".lg:absolute.lg:inset-y-0.lg:right-0.lg:w-1/2"
(defn hiccup->class
[class]
@ -222,29 +228,6 @@
(.then #(on-ok %)))
(on-failed resp)))))))))
#?(:cljs
(defn upload
[url file on-ok on-failed on-progress]
(let [xhr (js/XMLHttpRequest.)]
(.open xhr "put" url)
(gobj/set xhr "onload" on-ok)
(gobj/set xhr "onerror" on-failed)
(when (and (gobj/get xhr "upload")
on-progress)
(gobj/set (gobj/get xhr "upload")
"onprogress"
on-progress))
(.send xhr file))))
#?(:cljs
(defn post
[url body on-ok on-failed]
(fetch url {:method "post"
:headers {:Content-Type "application/json"}
:body (js/JSON.stringify (clj->js body))}
on-ok
on-failed)))
(defn zero-pad
[n]
(if (< n 10)

View File

@ -118,6 +118,26 @@
[]
ks))))
(defn cut-by
  "Cut `value` around the first occurrence of the wrapping symbols.
   value  - string to cut
   before - opening delimiter
   end    - closing delimiter
   Returns [prefix inner suffix]:
   - both delimiters found:  text before `before`, text between them, text after `end`
   - only `before` found:    [prefix remainder nil]
   - `before` not found:     [value nil nil]"
  [value before end]
  (if-let [b-pos (string/index-of value before)]
    (let [prefix (subs value 0 b-pos)
          rest-s (subs value (+ b-pos (count before)))]
      (if-let [e-pos (string/index-of rest-s end)]
        [prefix
         (subs rest-s 0 e-pos)
         (subs rest-s (+ e-pos (count end)))]
        [prefix rest-s nil]))
    [value nil nil]))
(defn get-graph-name-from-path
[path]
(when (string? path)

View File

@ -121,7 +121,4 @@
(#'model/get-unnecessary-namespaces-name '("one/two/tree" "one" "one/two" "non nested tag" "non nested link")))
"Must be one/two one"))
#_(cljs.test/test-ns 'frontend.db.model-test)

View File

@ -43,7 +43,6 @@
(is (= 8 authors)))
(testing "tags"
(prn (-> properties :tags))
;; tags split by `,` are counted into different tags
;; https://github.com/logseq/logseq/commit/435c2110bcc2d30ed743ba31375450f1a705b00b
(is (= 20 tags)))))

View File

@ -1,9 +1,11 @@
(ns frontend.handler.repo-test
(:require [cljs.test :refer [deftest use-fixtures]]
(:require [cljs.test :refer [deftest use-fixtures testing is]]
[frontend.handler.repo :as repo-handler]
[frontend.test.helper :as test-helper]
[frontend.test.helper :as test-helper :refer [load-test-files]]
[logseq.graph-parser.cli :as gp-cli]
[logseq.graph-parser.test.docs-graph-helper :as docs-graph-helper]
[logseq.graph-parser.util.block-ref :as block-ref]
[frontend.db.model :as model]
[frontend.db.conn :as conn]))
(use-fixtures :each {:before test-helper/start-test-db!
@ -19,3 +21,44 @@
db (conn/get-db test-helper/test-db)]
(docs-graph-helper/docs-graph-assertions db (map :file/path files))))
(deftest parse-files-and-load-to-db-with-block-refs-on-reload
(testing "Refs to blocks on a page are retained if that page is reloaded"
(let [test-uuid "16c90195-6a03-4b3f-839d-095a496d9acd"
target-page-content (str "- target block\n id:: " test-uuid)
referring-page-content (str "- " (block-ref/->block-ref test-uuid))]
(load-test-files [{:file/path "pages/target.md"
:file/content target-page-content}
{:file/path "pages/referrer.md"
:file/content referring-page-content}])
(is (= [(parse-uuid test-uuid)] (model/get-all-referenced-blocks-uuid)))
(load-test-files [{:file/path "pages/target.md"
:file/content target-page-content}])
(is (= [(parse-uuid test-uuid)] (model/get-all-referenced-blocks-uuid))))))
(deftest parse-files-and-load-to-db-with-page-rename
(testing
"Reload a file when the disk contents result in the file having a new page name"
(let [test-uuid "16c90195-6a03-4b3f-839d-095a496d9efc"
target-page-content (str "- target block\n id:: " test-uuid)
referring-page-content (str "- " (block-ref/->block-ref test-uuid))
update-referring-page-content (str "title:: updatedPage\n- " (block-ref/->block-ref test-uuid))
get-page-block-count (fn [page-name]
(let [page-id (:db/id (model/get-page page-name))]
(if (some? page-id)
(model/get-page-blocks-count test-helper/test-db page-id)
0)))]
(load-test-files [{:file/path "pages/target.md"
:file/content target-page-content}
{:file/path "pages/referrer.md"
:file/content referring-page-content}])
(is (= [(parse-uuid test-uuid)] (model/get-all-referenced-blocks-uuid)))
(is (= 1 (get-page-block-count "referrer")))
(is (= 0 (get-page-block-count "updatedPage")))
(load-test-files [{:file/path "pages/referrer.md"
:file/content update-referring-page-content}])
(is (= [(parse-uuid test-uuid)] (model/get-all-referenced-blocks-uuid)))
(is (= 0 (get-page-block-count "referrer")))
(is (= 2 (get-page-block-count "updatedPage"))))))

View File

@ -57,3 +57,37 @@
'(false false false false false false true true true true true true)
(map #(text-util/wrapped-by? "prop::value" % "::" "") (take 12 (range)))
))
;; Exercises text-util/cut-by: [prefix inner suffix] splitting on the first
;; occurrence of the wrapping delimiters, including empty-delimiter and
;; unterminated/missing-delimiter cases.
;; Fix: dropped the stray `[]` literal after the test name — `deftest` takes
;; no argument vector, so the empty vector was a dead expression.
(deftest test-cut-by
  (are [x y] (= x y)
    ["" "" ""]
    (text-util/cut-by "[[]]" "[[" "]]")

    ["" "abc" ""]
    (text-util/cut-by "[[abc]]" "[[" "]]")

    ["012 " "6" " [[2]]"]
    (text-util/cut-by "012 [[6]] [[2]]" "[[" "]]")

    ["" "prop" "value"]
    (text-util/cut-by "prop::value" "" "::")

    ["prop" "" "value"]
    (text-util/cut-by "prop::value" "::" "")

    ["some " "content" " here"]
    (text-util/cut-by "some $pfts>$content$pfts<$ here" "$pfts>$" "$pfts<$")

    ["some " "content$pft" nil]
    (text-util/cut-by "some $pfts>$content$pft" "$pfts>$" "$pfts<$")

    ["some $pf" nil nil]
    (text-util/cut-by "some $pf" "$pfts>$" "$pfts<$")

    ["" "content" ""]
    (text-util/cut-by "$pfts>$content$pfts<$" "$pfts>$" "$pfts<$")

    ["" "content$p" nil]
    (text-util/cut-by "$pfts>$content$p" "$pfts>$" "$pfts<$")))

View File

@ -254,7 +254,7 @@
;; ignore #+keyword: for parsing page references in orgmode
;; :ignored-page-references-keywords #{"author" "startup"}
;; Quick capture templates on mobile for receiving contents from other apps.
;; Quick capture templates for receiving contents from other apps.
;; Each template contains three elements {time}, {text} and {url}, which can be auto-expanded
;; by received contents from other apps. Note: the {} cannot be omitted.
;; - {time}: capture time
@ -266,6 +266,9 @@
;; {:text "[[quick capture]] **{time}**: {text} from {url}"
;; :media "[[quick capture]] **{time}**: {url}"}
;; Quick capture options
;; :quick-capture-options {:insert-today? false :redirect-page? false}
;; File sync options
;; Ignore these files when syncing, regexp is supported.
;; :file-sync/ignore-files []
@ -284,7 +287,7 @@
;; Decide the way to escape the special characters in the page title.
;; Warning:
;; This is a dangerous operation. If you want to change the setting,
;; should access the setting `Filename format` and follow the instructions.
;; should access the setting `Filename format` and follow the instructions.
;; Or you have to rename all the affected files manually then re-index on all
;; clients after the files are synced. Wrong handling may cause page titles
;; containing special characters to be messy.
@ -292,8 +295,5 @@
;; :file/name-format :triple-lowbar
;; ;use triple underscore `___` for slash `/` in page title
;; ;use Percent-encoding for other invalid characters
;; :file/name-format :legacy
;; ;use Percent-encoding for slash and other invalid characters
;; ;parse `.` in file name as slash `/` in page title
:file/name-format :triple-lowbar
}

View File

@ -8,7 +8,7 @@ This folder contains the JS codes for a custom build of Tldraw to fit the needs
### Prerequisites
Modern JS eco tools like Node.js and yarn.
Modern JS eco tools like Node.js and yarn.
### Run in dev mode

View File

@ -3,7 +3,13 @@ import React from 'react'
import { LogseqContext } from '../../lib/logseq-context'
import { TablerIcon } from '../icons'
export const BlockLink = ({ id }: { id: string }) => {
export const BlockLink = ({
id,
showReferenceContent = false,
}: {
id: string
showReferenceContent?: boolean
}) => {
const {
handlers: { isWhiteboardPage, redirectToPage, sidebarAddBlock, queryBlockByUUID },
renderers: { Breadcrumb, PageName, BlockReference },
@ -11,13 +17,16 @@ export const BlockLink = ({ id }: { id: string }) => {
let iconName = ''
let linkType = validUUID(id) ? 'B' : 'P'
let blockContent = ''
if (validUUID(id)) {
const block = queryBlockByUUID(id)
if (!block) {
return <span className='p-2'>Invalid reference. Did you remove it?</span>
return <span className="p-2">Invalid reference. Did you remove it?</span>
}
blockContent = block.content
if (block.properties?.['ls-type'] === 'whiteboard-shape') {
iconName = 'link-to-whiteboard'
} else {
@ -31,6 +40,9 @@ export const BlockLink = ({ id }: { id: string }) => {
}
}
const slicedContent =
blockContent && blockContent.length > 23 ? blockContent.slice(0, 20) + '...' : blockContent
return (
<button
className="inline-flex gap-1 items-center w-full"
@ -49,8 +61,8 @@ export const BlockLink = ({ id }: { id: string }) => {
<PageName pageName={id} />
) : (
<>
<Breadcrumb levelLimit={1} blockId={id} endSeparator />
<BlockReference blockId={id} />
<Breadcrumb levelLimit={1} blockId={id} endSeparator={showReferenceContent} />
{showReferenceContent && slicedContent}
</>
)}
</span>

View File

@ -29,7 +29,7 @@ const _ContextBar: TLContextBarComponent<Shape> = ({ shapes, offsets, hidden })
const elm = rContextBar.current
if (!elm) return
const size = rSize.current ?? [0, 0]
const [x, y] = getContextBarTranslation(size, { ...offsets, bottom: offsets.bottom - 32 })
const [x, y] = getContextBarTranslation(size, offsets)
elm.style.setProperty('transform', `translateX(${x}px) translateY(${y}px)`)
}, [offsets])

View File

@ -4,12 +4,16 @@ import React from 'react'
import type { Shape } from '../../lib'
import { BlockLink } from '../BlockLink'
export const QuickLinks: TLQuickLinksComponent<Shape> = observer(({ id, shape }) => {
export const QuickLinks: TLQuickLinksComponent<Shape> = observer(({ shape }) => {
const links = React.useMemo(() => {
const links = [...(shape.props.refs ?? [])]
const links = [...(shape.props.refs ?? [])].map<[ref: string, showReferenceContent: boolean]>(
// user added links should show the referenced block content
l => [l, true]
)
if (shape.props.type === 'logseq-portal' && shape.props.pageId) {
links.unshift(shape.props.pageId)
// portal reference should not show the block content
links.unshift([shape.props.pageId, false])
}
return links
@ -19,10 +23,10 @@ export const QuickLinks: TLQuickLinksComponent<Shape> = observer(({ id, shape })
return (
<div className="tl-quick-links" title="Shape Quick Links">
{links.map(ref => {
{links.map(([ref, showReferenceContent]) => {
return (
<div key={ref} className="tl-quick-links-row">
<BlockLink id={ref} />
<BlockLink id={ref} showReferenceContent={showReferenceContent} />
</div>
)
})}

View File

@ -14,7 +14,12 @@ export const ZoomMenu = observer(function ZoomMenu(): JSX.Element {
<DropdownMenuPrimitive.Trigger className="tl-button text-sm px-2 important" id="tl-zoom">
{(app.viewport.camera.zoom * 100).toFixed(0) + '%'}
</DropdownMenuPrimitive.Trigger>
<DropdownMenuPrimitive.Content className="tl-menu" id="zoomPopup" sideOffset={12}>
<DropdownMenuPrimitive.Content
onCloseAutoFocus={e => e.preventDefault()}
className="tl-menu"
id="zoomPopup"
sideOffset={12}
>
<DropdownMenuPrimitive.Item
className="tl-menu-item"
onSelect={preventEvent}

View File

@ -23,17 +23,19 @@ function ShapeLinkItem({
id,
type,
onRemove,
showContent,
}: {
id: string
type: 'B' | 'P'
onRemove?: () => void
showContent?: boolean
}) {
const { handlers } = React.useContext(LogseqContext)
return (
<div className="tl-shape-links-panel-item color-level relative">
<div className="whitespace-pre break-all overflow-hidden text-ellipsis inline-flex">
<BlockLink id={id} />
<BlockLink id={id} showReferenceContent={showContent} />
</div>
<div className="flex-1" />
<Button title="Open Page" type="button" onClick={() => handlers?.redirectToPage(id)}>
@ -131,6 +133,7 @@ export const ShapeLinksInput = observer(function ShapeLinksInput({
onRemove={() => {
onRefsChange(refs.filter((_, j) => i !== j))
}}
showContent
/>
)
})}

View File

@ -1,6 +1,5 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import { observable, makeObservable, action } from 'mobx'
import { isSafari } from '../utils'
export interface TLSettingsProps {
mode: 'light' | 'dark'

View File

@ -110,8 +110,7 @@ export const Canvas = observer(function Renderer<S extends TLReactShape>({
const selectedShapesSet = React.useMemo(() => new Set(selectedShapes || []), [selectedShapes])
const erasingShapesSet = React.useMemo(() => new Set(erasingShapes || []), [erasingShapes])
const singleSelectedShape = selectedShapes?.length === 1 ? selectedShapes[0] : undefined
const selectedOrHooveredShape = hoveredShape || singleSelectedShape
const selectedOrHoveredShape = hoveredShape || singleSelectedShape
return (
<div ref={rContainer} className={`tl-container ${className ?? ''}`}>
@ -140,7 +139,7 @@ export const Canvas = observer(function Renderer<S extends TLReactShape>({
isSelected={selectedShapesSet.has(shape)}
isErasing={erasingShapesSet.has(shape)}
meta={meta}
zIndex={1000 + i}
zIndex={selectedOrHoveredShape === shape ? 10000 : 1000 + i}
onEditingEnd={onEditingEnd}
/>
))}

View File

@ -14,7 +14,7 @@ export function getContextBarTranslation(barSize: number[], offset: TLOffset) {
let y = 0
if (offset.top < 116) {
// Show on bottom
y = offset.height / 2 + 72
y = offset.height / 2 + 40
// Too far down, move up
if (offset.bottom < 140) {
y += offset.bottom - 140