From 019cf9e0bf571af1b6d1bbdd5cc4e9d765db8ffe Mon Sep 17 00:00:00 2001 From: forehalo Date: Tue, 25 Feb 2025 04:27:28 +0000 Subject: [PATCH] perf(core): only full sync before exporting (#10408) --- packages/common/nbstore/src/frontend/blob.ts | 8 +- .../common/nbstore/src/sync/blob/index.ts | 178 +++++++++++------- packages/common/nbstore/src/worker/client.ts | 31 ++- .../common/nbstore/src/worker/consumer.ts | 18 +- packages/common/nbstore/src/worker/ops.ts | 3 +- .../workspace-setting/storage/export.tsx | 72 ++++++- packages/frontend/i18n/src/i18n.gen.ts | 24 ++- packages/frontend/i18n/src/resources/en.json | 4 + 8 files changed, 220 insertions(+), 118 deletions(-) diff --git a/packages/common/nbstore/src/frontend/blob.ts b/packages/common/nbstore/src/frontend/blob.ts index 4586b83e1c4b1..5ae4ad4759353 100644 --- a/packages/common/nbstore/src/frontend/blob.ts +++ b/packages/common/nbstore/src/frontend/blob.ts @@ -15,8 +15,12 @@ export class BlobFrontend { return this.sync.uploadBlob(blob); } - fullSync() { - return this.sync.fullSync(); + fullDownload() { + return this.sync.fullDownload(); + } + + fullUpload() { + return this.sync.fullUpload(); } addPriority(_id: string, _priority: number) { diff --git a/packages/common/nbstore/src/sync/blob/index.ts b/packages/common/nbstore/src/sync/blob/index.ts index 020b98f19c7e5..3ccc03f2827e1 100644 --- a/packages/common/nbstore/src/sync/blob/index.ts +++ b/packages/common/nbstore/src/sync/blob/index.ts @@ -9,6 +9,8 @@ import type { PeerStorageOptions } from '../types'; export interface BlobSyncState { isStorageOverCapacity: boolean; + total: number; + synced: number; } export interface BlobSync { @@ -18,7 +20,8 @@ export interface BlobSync { signal?: AbortSignal ): Promise; uploadBlob(blob: BlobRecord, signal?: AbortSignal): Promise; - fullSync(signal?: AbortSignal): Promise; + fullDownload(signal?: AbortSignal): Promise; + fullUpload(signal?: AbortSignal): Promise; setMaxBlobSize(size: number): void; onReachedMaxBlobSize(cb: (byteSize: number) => void): () => void; } @@ -26,6 +29,8 @@ export interface BlobSync { export class BlobSyncImpl implements BlobSync { readonly state$ = new BehaviorSubject({ isStorageOverCapacity: false, + total: Object.values(this.storages.remotes).length ? 
1 : 0, + synced: 0, }); private abort: AbortController | null = null; private maxBlobSize: number = 1024 * 1024 * 100; // 100MB @@ -34,19 +39,24 @@ export class BlobSyncImpl implements BlobSync { constructor(readonly storages: PeerStorageOptions) {} async downloadBlob(blobId: string, signal?: AbortSignal) { - const localBlob = await this.storages.local.get(blobId, signal); - if (localBlob) { - return localBlob; - } + try { + const localBlob = await this.storages.local.get(blobId, signal); + if (localBlob) { + return localBlob; + } - for (const storage of Object.values(this.storages.remotes)) { - const data = await storage.get(blobId, signal); - if (data) { - await this.storages.local.set(data, signal); - return data; + for (const storage of Object.values(this.storages.remotes)) { + const data = await storage.get(blobId, signal); + if (data) { + await this.storages.local.set(data, signal); + return data; + } } + return null; + } catch (e) { + console.error('error when download blob', e); + return null; } - return null; } async uploadBlob(blob: BlobRecord, signal?: AbortSignal) { @@ -62,7 +72,11 @@ export class BlobSyncImpl implements BlobSync { return await remote.set(blob, signal); } catch (err) { if (err instanceof OverCapacityError) { - this.state$.next({ isStorageOverCapacity: true }); + this.state$.next({ + isStorageOverCapacity: true, + total: this.state$.value.total, + synced: this.state$.value.synced, + }); } throw err; } @@ -70,71 +84,95 @@ export class BlobSyncImpl implements BlobSync { ); } - async fullSync(signal?: AbortSignal) { + async fullDownload(signal?: AbortSignal) { throwIfAborted(signal); await this.storages.local.connection.waitForConnected(signal); + const localList = (await this.storages.local.list(signal)).map(b => b.key); + this.state$.next({ + ...this.state$.value, + synced: localList.length, + }); - for (const [remotePeer, remote] of Object.entries(this.storages.remotes)) { - let localList: string[] = []; - let remoteList: string[] = []; + await Promise.allSettled( + Object.entries(this.storages.remotes).map( + async ([remotePeer, remote]) => { + await remote.connection.waitForConnected(signal); - await remote.connection.waitForConnected(signal); + const remoteList = (await remote.list(signal)).map(b => b.key); - try { - localList = (await this.storages.local.list(signal)).map(b => b.key); - throwIfAborted(signal); - remoteList = (await remote.list(signal)).map(b => b.key); - throwIfAborted(signal); - } catch (err) { - if (err === MANUALLY_STOP) { - throw err; - } - console.error(`error when sync`, err); - continue; - } + this.state$.next({ + ...this.state$.value, + total: Math.max(this.state$.value.total, remoteList.length), + }); - const needUpload = difference(localList, remoteList); - for (const key of needUpload) { - try { - const data = await this.storages.local.get(key, signal); throwIfAborted(signal); - if (data) { - await remote.set(data, signal); - throwIfAborted(signal); - } - } catch (err) { - if (err === MANUALLY_STOP) { - throw err; + + const needDownload = difference(remoteList, localList); + for (const key of needDownload) { + try { + const data = await remote.get(key, signal); + throwIfAborted(signal); + if (data) { + await this.storages.local.set(data, signal); + this.state$.next({ + ...this.state$.value, + synced: this.state$.value.synced + 1, + }); + throwIfAborted(signal); + } + } catch (err) { + if (err === MANUALLY_STOP) { + throw err; + } + console.error( + `error when sync ${key} from [${remotePeer}] to [local]`, + err + ); + } } - 
console.error( - `error when sync ${key} from [local] to [${remotePeer}]`, - err - ); } - } + ) + ); + } - const needDownload = difference(remoteList, localList); + async fullUpload(signal?: AbortSignal) { + throwIfAborted(signal); + + await this.storages.local.connection.waitForConnected(signal); + const localList = (await this.storages.local.list(signal)).map(b => b.key); + + await Promise.allSettled( + Object.entries(this.storages.remotes).map( + async ([remotePeer, remote]) => { + await remote.connection.waitForConnected(signal); + + const remoteList = (await remote.list(signal)).map(b => b.key); - for (const key of needDownload) { - try { - const data = await remote.get(key, signal); throwIfAborted(signal); - if (data) { - await this.storages.local.set(data, signal); - throwIfAborted(signal); - } - } catch (err) { - if (err === MANUALLY_STOP) { - throw err; + + const needUpload = difference(localList, remoteList); + for (const key of needUpload) { + try { + const data = await this.storages.local.get(key, signal); + throwIfAborted(signal); + if (data) { + await remote.set(data, signal); + throwIfAborted(signal); + } + } catch (err) { + if (err === MANUALLY_STOP) { + throw err; + } + console.error( + `error when sync ${key} from [local] to [${remotePeer}]`, + err + ); + } } - console.error( - `error when sync ${key} from [${remotePeer}] to [local]`, - err - ); } - } - } + ) + ); } start() { @@ -144,16 +182,12 @@ export class BlobSyncImpl implements BlobSync { const abort = new AbortController(); this.abort = abort; - - // TODO(@eyhn): fix this, large blob may cause iOS to crash? - if (!BUILD_CONFIG.isIOS) { - this.fullSync(abort.signal).catch(error => { - if (error === MANUALLY_STOP) { - return; - } - console.error('sync blob error', error); - }); - } + this.fullUpload(abort.signal).catch(error => { + if (error === MANUALLY_STOP) { + return; + } + console.error('sync blob error', error); + }); } stop() { diff --git a/packages/common/nbstore/src/worker/client.ts b/packages/common/nbstore/src/worker/client.ts index 7dbeec2f6bbe2..43279bbdbcd2b 100644 --- a/packages/common/nbstore/src/worker/client.ts +++ b/packages/common/nbstore/src/worker/client.ts @@ -257,26 +257,23 @@ class WorkerBlobSync implements BlobSync { uploadBlob(blob: BlobRecord, _signal?: AbortSignal): Promise { return this.client.call('blobSync.uploadBlob', blob); } - fullSync(signal?: AbortSignal): Promise { - return new Promise((resolve, reject) => { - const abortListener = () => { - reject(signal?.reason); - subscription.unsubscribe(); - }; + fullDownload(signal?: AbortSignal): Promise { + const download = this.client.call('blobSync.fullDownload'); - signal?.addEventListener('abort', abortListener); + signal?.addEventListener('abort', () => { + download.cancel(); + }); - const subscription = this.client.ob$('blobSync.fullSync').subscribe({ - next() { - signal?.removeEventListener('abort', abortListener); - resolve(); - }, - error(err) { - signal?.removeEventListener('abort', abortListener); - reject(err); - }, - }); + return download; + } + fullUpload(signal?: AbortSignal): Promise { + const upload = this.client.call('blobSync.fullUpload'); + + signal?.addEventListener('abort', () => { + upload.cancel(); }); + + return upload; } } diff --git a/packages/common/nbstore/src/worker/consumer.ts b/packages/common/nbstore/src/worker/consumer.ts index cffc864b1d155..e5ce66ceda406 100644 --- a/packages/common/nbstore/src/worker/consumer.ts +++ b/packages/common/nbstore/src/worker/consumer.ts @@ -234,20 +234,10 @@ class 
StoreConsumer { 'docSync.resetSync': () => this.docSync.resetSync(), 'blobSync.downloadBlob': key => this.blobSync.downloadBlob(key), 'blobSync.uploadBlob': blob => this.blobSync.uploadBlob(blob), - 'blobSync.fullSync': () => - new Observable(subscriber => { - const abortController = new AbortController(); - this.blobSync - .fullSync(abortController.signal) - .then(() => { - subscriber.next(true); - subscriber.complete(); - }) - .catch(error => { - subscriber.error(error); - }); - return () => abortController.abort(MANUALLY_STOP); - }), + 'blobSync.fullDownload': (_, { signal }) => + this.blobSync.fullDownload(signal), + 'blobSync.fullUpload': (_, { signal }) => + this.blobSync.fullUpload(signal), 'blobSync.state': () => this.blobSync.state$, 'blobSync.setMaxBlobSize': size => this.blobSync.setMaxBlobSize(size), 'blobSync.onReachedMaxBlobSize': () => diff --git a/packages/common/nbstore/src/worker/ops.ts b/packages/common/nbstore/src/worker/ops.ts index cbdeefa682155..228a3c6d0fdf1 100644 --- a/packages/common/nbstore/src/worker/ops.ts +++ b/packages/common/nbstore/src/worker/ops.ts @@ -87,7 +87,8 @@ interface GroupedWorkerOps { blobSync: { downloadBlob: [string, BlobRecord | null]; uploadBlob: [BlobRecord, void]; - fullSync: [void, boolean]; + fullDownload: [void, void]; + fullUpload: [void, void]; setMaxBlobSize: [number, void]; onReachedMaxBlobSize: [void, number]; state: [void, BlobSyncState]; diff --git a/packages/frontend/core/src/desktop/dialogs/setting/workspace-setting/storage/export.tsx b/packages/frontend/core/src/desktop/dialogs/setting/workspace-setting/storage/export.tsx index b6cf93ab281c2..f47ffb2c09c25 100644 --- a/packages/frontend/core/src/desktop/dialogs/setting/workspace-setting/storage/export.tsx +++ b/packages/frontend/core/src/desktop/dialogs/setting/workspace-setting/storage/export.tsx @@ -8,8 +8,8 @@ import type { Workspace } from '@affine/core/modules/workspace'; import { useI18n } from '@affine/i18n'; import { universalId } from '@affine/nbstore'; import track from '@affine/track'; -import { useService } from '@toeverything/infra'; -import { useState } from 'react'; +import { LiveData, useLiveData, useService } from '@toeverything/infra'; +import { useMemo, useState } from 'react'; interface ExportPanelProps { workspace: Workspace; @@ -20,9 +20,44 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => { const [saving, setSaving] = useState(false); const isOnline = useSystemOnline(); const desktopApi = useService(DesktopApiService); + const isLocalWorkspace = workspace.flavour === 'local'; + + const docSyncState = useLiveData( + useMemo(() => { + return workspace + ? LiveData.from(workspace.engine.doc.state$, null).throttleTime(500) + : null; + }, [workspace]) + ); + + const blobSyncState = useLiveData( + useMemo(() => { + return workspace + ? 
LiveData.from(workspace.engine.blob.state$, null).throttleTime(500) + : null; + }, [workspace]) + ); + + const docSynced = !docSyncState?.syncing; + const blobSynced = + !blobSyncState || blobSyncState.synced === blobSyncState.total; + const [fullSynced, setFullSynced] = useState(false); + + const shouldWaitForFullSync = + isLocalWorkspace || !isOnline || (fullSynced && docSynced && blobSynced); + const fullSyncing = fullSynced && (!docSynced || !blobSynced); + + const fullSync = useAsyncCallback(async () => { + // NOTE: doc full sync is always started by default + // await workspace.engine.doc.waitForSynced(); + workspace.engine.blob.fullDownload().catch(() => { + /* noop */ + }); + setFullSynced(true); + }, [workspace.engine.blob]); const onExport = useAsyncCallback(async () => { - if (saving || !workspace) { + if (saving) { return; } setSaving(true); @@ -30,10 +65,6 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => { track.$.settingsPanel.workspace.export({ type: 'workspace', }); - if (isOnline) { - await workspace.engine.doc.waitForSynced(); - await workspace.engine.blob.fullSync(); - } const result = await desktopApi.handler?.dialog.saveDBFileAs( universalId({ @@ -53,16 +84,37 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => { } finally { setSaving(false); } - }, [desktopApi, isOnline, saving, t, workspace]); + }, [desktopApi, saving, t, workspace]); + + if (!shouldWaitForFullSync) { + return ( + + + + ); + } + + const button = + isLocalWorkspace || isOnline ? t['Export']() : t['Export(Offline)'](); + const desc = + isLocalWorkspace || isOnline + ? t['Export Description']() + : t['Export Description(Offline)'](); return ( - + ); diff --git a/packages/frontend/i18n/src/i18n.gen.ts b/packages/frontend/i18n/src/i18n.gen.ts index 12b67db7dca5c..98a582eb6f94b 100644 --- a/packages/frontend/i18n/src/i18n.gen.ts +++ b/packages/frontend/i18n/src/i18n.gen.ts @@ -195,10 +195,26 @@ export function useAFFiNEI18N(): { * `Export` */ Export(): string; + /** + * `Export (Offline)` + */ + ["Export(Offline)"](): string; + /** + * `Full Sync` + */ + ["Full Sync"](): string; /** * `You can export the entire Workspace data for backup, and the exported data can be re-imported.` */ ["Export Description"](): string; + /** + * `You can export the entire Workspace data for backup, and the exported data can be re-imported, but you are offline now which will cause the exported data not up to date.` + */ + ["Export Description(Offline)"](): string; + /** + * `You can export the entire Workspace data for backup, and the exported data can be re-imported, but you must sync all cloud data first to keep your exported data up to date.` + */ + ["Full Sync Description"](): string; /** * `Export failed` */ @@ -2675,6 +2691,10 @@ export function useAFFiNEI18N(): { * `Workspace name` */ ["com.affine.nameWorkspace.subtitle.workspace-name"](): string; + /** + * `Workspace type` + */ + ["com.affine.nameWorkspace.subtitle.workspace-type"](): string; /** * `Name your workspace` */ @@ -3513,11 +3533,11 @@ export function useAFFiNEI18N(): { */ ["com.affine.payment.cloud.free.benefit.g2-5"](): string; /** - * `Open-source under MIT license.` + * `Local Editor under MIT license.` */ ["com.affine.payment.cloud.free.description"](): string; /** - * `FOSS + Basic` + * `Local FOSS + Cloud Basic` */ ["com.affine.payment.cloud.free.name"](): string; /** diff --git a/packages/frontend/i18n/src/resources/en.json b/packages/frontend/i18n/src/resources/en.json index 8ccc4a0b68296..fa7aaae22e143 
100644
--- a/packages/frontend/i18n/src/resources/en.json
+++ b/packages/frontend/i18n/src/resources/en.json
@@ -39,7 +39,11 @@
   "Enable AFFiNE Cloud Description": "If enabled, the data in this workspace will be backed up and synchronised via AFFiNE Cloud.",
   "Enable cloud hint": "The following functions rely on AFFiNE Cloud. All data is stored on the current device. You can enable AFFiNE Cloud for this workspace to keep data in sync with the cloud.",
   "Export": "Export",
+  "Export(Offline)": "Export (Offline)",
+  "Full Sync": "Full Sync",
   "Export Description": "You can export the entire Workspace data for backup, and the exported data can be re-imported.",
+  "Export Description(Offline)": "You can export the entire Workspace data for backup, and the exported data can be re-imported. But you are offline now, so the exported data may not be up to date.",
+  "Full Sync Description": "You can export the entire Workspace data for backup, and the exported data can be re-imported. But you must sync all cloud data first to keep your exported data up to date.",
   "Export failed": "Export failed",
   "Export success": "Export success",
   "Export to HTML": "Export to HTML",
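
Summary of the change, with a minimal illustrative sketch (not part of the patch): the old `fullSync` ran on `BlobSyncImpl.start()` (and was skipped entirely on iOS) and, for each remote peer, both uploaded missing local blobs and downloaded missing remote blobs. After this change only the upload half (`fullUpload`) runs on start; the download half (`fullDownload`) is deferred until the user triggers a full sync from the export panel of a cloud workspace, and its progress is surfaced through `state$` as `synced`/`total` so the panel can gate the Export action. The sketch below restates the download half against a simplified store shape; `BlobStore`, `pullMissingBlobs`, and `onProgress` are illustrative names for this note, not the real nbstore types.

// TypeScript sketch of the "download only what is missing" pass that
// fullDownload performs per remote peer (simplified to a single remote).
interface BlobStore {
  list(signal?: AbortSignal): Promise<string[]>;
  get(key: string, signal?: AbortSignal): Promise<Uint8Array | null>;
  set(key: string, data: Uint8Array, signal?: AbortSignal): Promise<void>;
}

async function pullMissingBlobs(
  local: BlobStore,
  remote: BlobStore,
  onProgress?: (synced: number, total: number) => void,
  signal?: AbortSignal
): Promise<void> {
  // Seed progress with what is already stored locally, mirroring how
  // fullDownload initializes `synced` from the local blob list.
  const localKeys = new Set(await local.list(signal));
  const remoteKeys = await remote.list(signal);
  let synced = localKeys.size;
  const total = Math.max(synced, remoteKeys.length);
  onProgress?.(synced, total);

  // Pull only the blobs the local store does not have yet. The real
  // implementation logs and skips per-key failures instead of aborting,
  // and runs one such pass per remote peer via Promise.allSettled.
  for (const key of remoteKeys) {
    if (signal?.aborted) throw signal.reason;
    if (localKeys.has(key)) continue;
    const data = await remote.get(key, signal);
    if (data) {
      await local.set(key, data, signal);
      onProgress?.(++synced, total);
    }
  }
}

In the export panel this pairs with the new `Full Sync` action: clicking it calls `workspace.engine.blob.fullDownload()`, and the Export button is offered once the doc engine is no longer syncing and the blob state reports `synced === total`.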