@powerhousedao/connect 1.0.9-dev.0 → 1.0.11-dev.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +661 -0
- package/dist/assets/{app-D9QJBr8i.css → app-BIIVKAZr.css} +223 -60
- package/dist/assets/{app-Uu65do7x.js → app-CSB8ljfZ.js} +2107 -1027
- package/dist/assets/{app-loader-BnxLpX27.js → app-loader-k9YqeUtT.js} +416 -268
- package/dist/assets/{app-loader-Bnp0H-wa.css → app-loader-pcztQTL4.css} +173 -26
- package/dist/assets/{ccip-CpD5P3bc.js → ccip-CE4X4bM8.js} +3 -3
- package/dist/assets/{content-DV_bRXbH.js → content-DPhkZXQC.js} +37 -7
- package/dist/assets/{index-B4m3ucR9.js → index-COuqxA6R.js} +3 -3
- package/dist/assets/{index-Ch_NXi_f.js → index-CTQ6qYUS.js} +670 -519
- package/dist/assets/{index-yrCJMRn6.js → index-Dr18XVHc.js} +4 -4
- package/dist/assets/{main.CpORyZoE.js → main.Biy01WIR.js} +1 -1
- package/dist/assets/{style-D4JhTt_m.css → style-Ce3V83BE.css} +31 -36
- package/dist/hmr.js +1 -1
- package/dist/index.html +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-G6LMXRY5.js → chunk-2ONJ2PX4.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-IC6B3767.js → chunk-6GRZOLU5.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-FW7N6EJH.js → chunk-C7QRY43M.js} +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-45DCPCA7.js → chunk-CO2RVWYY.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-UDKYG6I4.js → chunk-HNBKSZYU.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-6MBHOHAA.js → chunk-HNTH6HAH.js} +7 -20
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-N5UNGAA6.js → chunk-HYN4HC4N.js} +391 -232
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-F3RCGUF6.js → chunk-NHD6VUCD.js} +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-OYYVE7RP.js → chunk-ODF4NZBP.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-M2UUQ5LH.js → chunk-U34SEKEB.js} +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/context/index.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/context/read-mode.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/index.js +8 -8
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useAddDebouncedOperations.js +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useConnectCrypto.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDocumentDrives.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDocumentEditor.js +5 -5
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActions.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActionsWithUiNodes.js +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/index.js +10 -10
- package/dist/modules/@powerhousedao/reactor-browser/reactor.js +2 -2
- package/dist/vite-envs.sh +1 -1
- package/package.json +9 -9

@@ -1271,13 +1271,6 @@ var DriveAlreadyExistsError = class extends Error {
     this.driveId = driveId;
   }
 };
-var DriveNotFoundError = class extends Error {
-  driveId;
-  constructor(driveId) {
-    super(`Drive with id ${driveId} not found`);
-    this.driveId = driveId;
-  }
-};
 var SynchronizationUnitNotFoundError = class extends Error {
   syncUnitId;
   constructor(message, syncUnitId) {
@@ -1710,7 +1703,7 @@ var MemoryStorage = class {
   // IDocumentStorage
   ////////////////////////////////
   exists(documentId) {
-    return Promise.resolve(!!this.documents[documentId]
+    return Promise.resolve(!!this.documents[documentId]);
   }
   create(documentId, document) {
     this.documents[documentId] = document;
@@ -1719,10 +1712,6 @@ var MemoryStorage = class {
   get(documentId) {
     const document = this.documents[documentId];
     if (!document) {
-      const drive = this.documents[`drive/${documentId}`];
-      if (drive) {
-        return Promise.resolve(drive);
-      }
       throw new Error(`Document with id ${documentId} not found`);
     }
     return Promise.resolve(document);
@@ -1775,9 +1764,6 @@ var MemoryStorage = class {
     const manifest = this.getManifest(drive);
     return Promise.resolve([...manifest.documentIds]);
   }
-  getDocument(driveId, id) {
-    return this.get(id);
-  }
   async clearStorage() {
     this.documents = {};
     this.driveManifests = {};
@@ -1789,7 +1775,7 @@ var MemoryStorage = class {
     this.updateDriveManifest(drive, manifest);
   }
   async addDocumentOperations(drive, id, operations, header) {
-    const document = await this.
+    const document = await this.get(id);
     if (!document) {
       throw new Error(`Document with id ${id} not found`);
     }
@@ -1806,16 +1792,9 @@ var MemoryStorage = class {
   async getDrives() {
     return Object.keys(this.driveManifests);
   }
-  async getDrive(id) {
-    const drive = this.documents[`drive/${id}`];
-    if (!drive) {
-      throw new DriveNotFoundError(id);
-    }
-    return drive;
-  }
   async getDriveBySlug(slug) {
     for (const driveId of Object.keys(this.driveManifests)) {
-      const drive = this.documents[
+      const drive = this.documents[driveId];
       if (drive.initialState.state.global.slug === slug) {
         return drive;
       }
@@ -1834,13 +1813,13 @@ var MemoryStorage = class {
         throw new Error(`Drive with slug ${slug} already exists`);
       }
     }
-    await this.create(
+    await this.create(id, drive);
     this.updateDriveManifest(id, { documentIds: /* @__PURE__ */ new Set() });
   }
   async addDriveOperations(id, operations, header) {
-    const drive = await this.
+    const drive = await this.get(id);
     const mergedOperations = mergeOperations(drive.operations, operations);
-    this.documents[
+    this.documents[id] = {
       ...drive,
       ...header,
       operations: mergedOperations
@@ -11745,9 +11724,7 @@ var DefaultDrivesManager = class {
     try {
       await this.server.deleteDrive(driveId);
     } catch (error) {
-
-        logger.error(error);
-      }
+      logger.error(error);
     }
   }
   async preserveDrivesById(driveIdsToPreserve, drives, removeStrategy = "detach") {
@@ -11898,11 +11875,14 @@ var PULL_DRIVE_INTERVAL = 1500;
 
 // ../../packages/document-drive/dist/src/server/listener/transmitter/pull-responder.js
 var MAX_REVISIONS_PER_ACK = 100;
+var _staticLogger;
+var staticLogger = () => {
+  if (!_staticLogger) {
+    _staticLogger = childLogger(["PullResponderTransmitter", "static"]);
+  }
+  return _staticLogger;
+};
 var PullResponderTransmitter = class _PullResponderTransmitter {
-  static staticLogger = childLogger([
-    "PullResponderTransmitter",
-    "static"
-  ]);
   logger = childLogger([
     "PullResponderTransmitter",
     Math.floor(Math.random() * 999).toString()
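
The hunk above replaces PullResponderTransmitter's eagerly created `static staticLogger` field with a lazily built module-level singleton. A minimal sketch of the same lazy-initialization pattern, with a stubbed childLogger standing in for the package's real logger factory (the stub is illustrative only, not the package API):

// Stub logger factory, used only so this sketch runs on its own.
const childLogger = (tags) => ({
  verbose: (...args) => console.debug(`[${tags.join("/")}]`, ...args),
  error: (...args) => console.error(`[${tags.join("/")}]`, ...args)
});

// Lazy singleton: the logger is built on first use instead of at class-definition time.
let _staticLogger;
const staticLogger = () => {
  if (!_staticLogger) {
    _staticLogger = childLogger(["PullResponderTransmitter", "static"]);
  }
  return _staticLogger;
};

staticLogger().verbose("first call creates the logger");
staticLogger().verbose("second call reuses the same instance");
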
@@ -11915,8 +11895,18 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     this.logger.verbose(`constructor(listener: ${listener.listenerId})`);
   }
   getStrands(options) {
-    this.logger.verbose(`getStrands
-    return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options)
+    this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands called for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}, options: ${JSON.stringify(options || {})}`);
+    return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options).then((strands) => {
+      this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands returning ${strands.length} strands for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
+      if (strands.length === 0) {
+        this.logger.verbose(`[SYNC DEBUG] No strands returned for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
+      } else {
+        for (const strand of strands) {
+          this.logger.verbose(`[SYNC DEBUG] Strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, operations: ${strand.operations.length}`);
+        }
+      }
+      return strands;
+    });
   }
   disconnect() {
     return Promise.resolve();
@@ -11936,15 +11926,21 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     }
     return success;
   }
-  static async registerPullResponder(driveId, url, filter) {
-
+  static async registerPullResponder(driveId, url, filter, listenerId) {
+    staticLogger().verbose(`registerPullResponder(url: ${url})`, filter);
     const result = await requestGraphql(url, gql`
-      mutation registerPullResponderListener(
-
+      mutation registerPullResponderListener(
+        $filter: InputListenerFilter!
+        $listenerId: String
+      ) {
+        registerPullResponderListener(
+          filter: $filter
+          listenerId: $listenerId
+        ) {
           listenerId
         }
       }
-    `, { filter });
+    `, { filter, listenerId });
     const error = result.errors?.at(0);
     if (error) {
       throw error;
@@ -11955,7 +11951,7 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     return result.registerPullResponderListener.listenerId;
   }
   static async pullStrands(driveId, url, listenerId, options) {
-
+    staticLogger().verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands called for drive: ${driveId}, url: ${url}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
     const result = await requestGraphql(url, gql`
       query strands($listenerId: ID!) {
         system {
@@ -11995,27 +11991,34 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     `, { listenerId });
     const error = result.errors?.at(0);
     if (error) {
+      staticLogger().verbose(`[SYNC DEBUG] Error pulling strands for drive: ${driveId}, listener: ${listenerId}, error: ${JSON.stringify(error)}`);
       throw error;
     }
     if (!result.system) {
+      staticLogger().verbose(`[SYNC DEBUG] No system data returned when pulling strands for drive: ${driveId}, listener: ${listenerId}`);
       return [];
     }
-
+    const strands = result.system.sync.strands.map((s) => ({
       ...s,
       operations: s.operations.map((o) => ({
         ...o,
         input: JSON.parse(o.input)
       }))
     }));
+    staticLogger().verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
+    if (strands.length > 0) {
+      staticLogger().verbose(`[SYNC DEBUG] Strands being returned: ${strands.map((s) => `${s.documentId}:${s.scope}`).join(", ")}`);
+    }
+    return strands;
   }
   static async acknowledgeStrands(url, listenerId, revisions) {
-
+    staticLogger().verbose(`acknowledgeStrands(url: ${url}, listener: ${listenerId})`, revisions);
     const chunks = [];
     for (let i = 0; i < revisions.length; i += MAX_REVISIONS_PER_ACK) {
       chunks.push(revisions.slice(i, i + MAX_REVISIONS_PER_ACK));
     }
     if (chunks.length > 1) {
-
+      staticLogger().verbose(`Breaking strand acknowledgement into ${chunks.length} chunks...`);
     }
     const results = await Promise.allSettled(chunks.map(async (chunk) => {
       const result = await requestGraphql(url, gql`
@@ -12039,74 +12042,124 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
       throw new Error("Error acknowledging strands");
     }
   }
+  /**
+   * This function will only throw if `onError` throws an error (or there is
+   * an unintentionally unhandled error in the pull loop).
+   *
+   * All other errors are caught, logged, and passed to `onError`.
+   *
+   * Because of this, `onError` _may be called multiple times_.
+   */
   static async executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
-
+    staticLogger().verbose(`executePull(driveId: ${driveId}), trigger:`, trigger);
+    staticLogger().info(`[SYNC DEBUG] PullResponderTransmitter.executePull starting for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    const { url } = trigger.data;
+    let strands;
+    let error;
+    const listenerId = trigger.data.listenerId;
     try {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      let error = void 0;
-      try {
-        const result = await onStrandUpdate(strand, {
-          type: "trigger",
-          trigger
-        });
-        if (result.error) {
-          throw result.error;
+      strands = await _PullResponderTransmitter.pullStrands(driveId, url, listenerId);
+    } catch (e) {
+      error = e;
+      const graphqlError = error;
+      const errors = graphqlError.response?.errors ?? [];
+      for (const err of errors) {
+        if (err.message === "Listener not found") {
+          staticLogger().verbose(`[SYNC DEBUG] Auto-registering pull responder for drive: ${driveId}`);
+          await _PullResponderTransmitter.registerPullResponder(trigger.driveId, url, trigger.filter, listenerId);
+          try {
+            strands = await _PullResponderTransmitter.pullStrands(driveId, url, listenerId);
+            staticLogger().verbose(`Successfully auto-registered and pulled strands for drive: ${driveId}, listenerId: ${listenerId}`);
+          } catch (error2) {
+            staticLogger().error(`Could not resolve 'Listener not found' error by registering a new pull responder for drive: ${driveId}, listenerId: ${listenerId}: ${error2}`);
+            onError(error2);
+            return;
           }
-
-
-        onError(error);
-      }
-      listenerRevisions.push({
-        branch: strand.branch,
-        documentId: strand.documentId || "",
-        driveId: strand.driveId,
-        revision: operations.pop()?.index ?? -1,
-        scope: strand.scope,
-        status: error ? error instanceof OperationError ? error.status : "ERROR" : "SUCCESS",
-        error
-      });
+          break;
+        }
       }
-
-
-
-
+    }
+    if (!strands) {
+      staticLogger().error(`Error pulling strands for drive, and could not auto-register: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
+      onError(error);
+      return;
+    }
+    if (!strands.length) {
+      staticLogger().verbose(`[SYNC DEBUG] No strands returned in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
       try {
-
-
-
-
-        success = true;
-      } catch (error) {
-        this.staticLogger.error("ACK error", error);
+        onRevisions?.([]);
+      } catch (error2) {
+        staticLogger().error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+        onError(error2);
       }
-
-
-
-
+      return;
+    }
+    staticLogger().verbose(`[SYNC DEBUG] Processing ${strands.length} strands in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    const listenerRevisions = [];
+    for (const strand of strands) {
+      const operations = strand.operations.map((op) => ({
+        ...op,
+        scope: strand.scope,
+        branch: strand.branch
+      }));
+      staticLogger().verbose(`[SYNC DEBUG] Processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations`);
+      let error2 = void 0;
+      try {
+        const result = await onStrandUpdate(strand, {
+          type: "trigger",
+          trigger
+        });
+        if (result.error) {
+          throw result.error;
+        }
+      } catch (e) {
+        staticLogger().error(`Error processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations: ${e}`);
+        error2 = e;
+        onError(error2);
       }
+      listenerRevisions.push({
+        branch: strand.branch,
+        documentId: strand.documentId || "",
+        driveId: strand.driveId,
+        revision: operations.pop()?.index ?? -1,
+        scope: strand.scope,
+        status: error2 ? error2 instanceof OperationError ? error2.status : "ERROR" : "SUCCESS",
+        error: error2
+      });
+    }
+    staticLogger().verbose("Processed strands...");
+    try {
+      onRevisions?.(listenerRevisions);
+    } catch (error2) {
+      staticLogger().error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+      onError(error2);
+    }
+    staticLogger().verbose(`[SYNC DEBUG] Acknowledging ${listenerRevisions.length} strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    let success = false;
+    try {
+      await _PullResponderTransmitter.acknowledgeStrands(url, trigger.data.listenerId, listenerRevisions.map((revision) => {
+        const { error: error2, ...rest } = revision;
+        return rest;
+      }));
+      success = true;
+    } catch (error2) {
+      staticLogger().error(`Error acknowledging strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+      onError(error2);
+    }
+    if (success) {
+      staticLogger().verbose(`[SYNC DEBUG] Successfully acknowledged strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    } else {
+      staticLogger().error("Failed to acknowledge strands");
+    }
+    try {
       onAcknowledge?.(success);
-    } catch (
-
-      onError(
+    } catch (error2) {
+      staticLogger().error(`Error calling onAcknowledge for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+      onError(error2);
     }
   }
   static setupPull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
-
+    staticLogger().verbose(`[SYNC DEBUG] PullResponderTransmitter.setupPull initiated for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
     const { interval } = trigger.data;
     let loopInterval = PULL_DRIVE_INTERVAL;
     if (interval) {
@@ -12118,20 +12171,25 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
       } catch {
       }
     }
+    staticLogger().verbose(`[SYNC DEBUG] Pull interval set to ${loopInterval}ms for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
     let isCancelled = false;
     let timeout;
     const executeLoop = async () => {
       while (!isCancelled) {
-
+        staticLogger().verbose(`[SYNC DEBUG] Starting pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
         await this.executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge);
+        staticLogger().verbose(`[SYNC DEBUG] Completed pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}, waiting ${loopInterval}ms for next cycle`);
         await new Promise((resolve) => {
-
+          staticLogger().verbose(`Scheduling next pull in ${loopInterval} ms`);
           timeout = setTimeout(resolve, loopInterval);
         });
       }
     };
-    executeLoop().catch(
+    executeLoop().catch((error) => {
+      staticLogger().error(`Error in executeLoop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
+    });
     return () => {
+      staticLogger().verbose(`[SYNC DEBUG] Cancelling pull loop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
       isCancelled = true;
      if (timeout !== void 0) {
        clearTimeout(timeout);
@@ -12139,17 +12197,20 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     };
   }
   static async createPullResponderTrigger(driveId, url, options) {
-
+    staticLogger().verbose(`createPullResponderTrigger(drive: ${driveId}, url: ${url})`);
     const { pullFilter, pullInterval } = options;
-    const
+    const filter = pullFilter ?? {
       documentId: ["*"],
       documentType: ["*"],
       branch: ["*"],
       scope: ["*"]
-    }
+    };
+    const listenerId = await _PullResponderTransmitter.registerPullResponder(driveId, url, filter);
     const pullTrigger = {
       id: generateUUID(),
       type: "PullResponder",
+      driveId,
+      filter,
       data: {
         url,
         listenerId,
@@ -12276,9 +12337,10 @@ function isAtRevision(document, revisions) {
 
 // ../../packages/document-drive/dist/src/server/base-server.js
 var BaseDocumentDriveServer = class {
+  logger = childLogger(["BaseDocumentDriveServer"]);
   // external dependencies
   documentModelModules;
-
+  legacyStorage;
   documentStorage;
   cache;
   queueManager;
@@ -12315,7 +12377,7 @@ var BaseDocumentDriveServer = class {
   initializePromise;
   constructor(documentModelModules, storage, documentStorage, cache, queueManager, eventEmitter, synchronizationManager, listenerManager, options) {
     this.documentModelModules = documentModelModules;
-    this.
+    this.legacyStorage = storage;
     this.documentStorage = documentStorage;
     this.cache = cache;
     this.queueManager = queueManager;
@@ -12347,19 +12409,19 @@ var BaseDocumentDriveServer = class {
   async _initialize() {
     await this.listenerManager.initialize(this.handleListenerError);
     await this.queueManager.init(this.queueDelegate, (error) => {
-      logger.error(`Error initializing queue manager`, error);
+      this.logger.error(`Error initializing queue manager`, error);
       errors.push(error);
     });
     try {
       await this.defaultDrivesManager.removeOldremoteDrives();
     } catch (error) {
-      logger.error(error);
+      this.logger.error(error);
     }
     const errors = [];
     const drives = await this.getDrives();
     for (const drive of drives) {
       await this._initializeDrive(drive).catch((error) => {
-        logger.error(`Error initializing drive ${drive}`, error);
+        this.logger.error(`Error initializing drive ${drive}`, error);
         errors.push(error);
       });
     }
@@ -12389,7 +12451,7 @@ var BaseDocumentDriveServer = class {
     return source.type === "local" ? "push" : "pull";
   }
   handleListenerError(error, driveId, listener) {
-    logger.error(`Listener ${listener.listener.label ?? listener.listener.listenerId} error:`, error);
+    this.logger.error(`Listener ${listener.listener.label ?? listener.listener.listenerId} error:`, error);
     const status = error instanceof OperationError ? error.status : "ERROR";
     this.synchronizationManager.updateSyncStatus(driveId, { push: status }, error);
   }
@@ -12449,9 +12511,9 @@ var BaseDocumentDriveServer = class {
       if (pushListener) {
         this.getSynchronizationUnitsRevision(driveId, syncUnits).then((syncUnitRevisions) => {
           for (const revision of syncUnitRevisions) {
-            this.listenerManager.updateListenerRevision(pushListener.listenerId, driveId, revision.syncId, revision.revision).catch(logger.error);
+            this.listenerManager.updateListenerRevision(pushListener.listenerId, driveId, revision.syncId, revision.revision).catch(this.logger.error);
           }
-        }).catch(logger.error);
+        }).catch(this.logger.error);
       }
     }
   });
@@ -12473,14 +12535,19 @@ var BaseDocumentDriveServer = class {
   }
   async _initializeDrive(driveId) {
     const drive = await this.getDrive(driveId);
+    this.logger.verbose(`[SYNC DEBUG] Initializing drive ${driveId} with slug "${drive.state.global.slug}"`);
     await this.synchronizationManager.initializeDriveSyncStatus(driveId, drive);
     if (this.shouldSyncRemoteDrive(drive)) {
+      this.logger.verbose(`[SYNC DEBUG] Starting sync for remote drive ${driveId}`);
       await this.startSyncRemoteDrive(driveId);
     }
+    this.logger.verbose(`[SYNC DEBUG] Processing ${drive.state.local.listeners.length} listeners for drive ${driveId}`);
     for (const zodListener of drive.state.local.listeners) {
       if (zodListener.callInfo?.transmitterType === "SwitchboardPush") {
+        this.logger.verbose(`[SYNC DEBUG] Setting up SwitchboardPush listener ${zodListener.listenerId} for drive ${driveId}`);
         const transmitter = new SwitchboardPushTransmitter(zodListener.callInfo?.data ?? "");
-        this.
+        this.logger.verbose(`[SYNC DEBUG] Created SwitchboardPush transmitter with URL: ${zodListener.callInfo?.data || "none"}`);
+        await this.listenerManager.setListener(driveId, {
           block: zodListener.block,
           driveId: drive.state.global.id,
           filter: {
@@ -12494,7 +12561,29 @@ var BaseDocumentDriveServer = class {
           system: zodListener.system,
           label: zodListener.label ?? "",
           transmitter
+        }).then(() => {
+          this.logger.verbose(`[SYNC DEBUG] Successfully set up listener ${zodListener.listenerId} for drive ${driveId}`);
         });
+      } else if (zodListener.callInfo?.transmitterType === "PullResponder") {
+        this.logger.verbose(`[SYNC DEBUG] Setting up PullResponder listener ${zodListener.listenerId} for drive ${driveId}`);
+        const pullResponderListener = {
+          driveId,
+          listenerId: zodListener.listenerId,
+          block: false,
+          filter: zodListener.filter,
+          system: false,
+          label: `PullResponder #${zodListener.listenerId}`,
+          callInfo: {
+            data: "",
+            name: "PullResponder",
+            transmitterType: "PullResponder"
+          }
+        };
+        const pullResponder = new PullResponderTransmitter(pullResponderListener, this.listenerManager);
+        pullResponderListener.transmitter = pullResponder;
+        await this.listenerManager.setListener(driveId, pullResponderListener);
+      } else {
+        this.logger.error(`Skipping listener ${zodListener.listenerId} with unsupported type ${zodListener.callInfo?.transmitterType || "unknown"}`);
       }
     }
   }
@@ -12526,7 +12615,7 @@ var BaseDocumentDriveServer = class {
     if (!id) {
       throw new Error("Invalid Drive Id");
     }
-    const drives = await this.
+    const drives = await this.legacyStorage.getDrives();
     if (drives.includes(id)) {
       throw new DriveAlreadyExistsError(id);
     }
@@ -12536,7 +12625,7 @@ var BaseDocumentDriveServer = class {
     document.meta = {
       preferredEditor
     };
-    await this.
+    await this.legacyStorage.createDrive(id, document);
     if (input.global.slug) {
       await this.cache.deleteDriveBySlug(input.global.slug);
     }
@@ -12571,7 +12660,7 @@ var BaseDocumentDriveServer = class {
       this.stopSyncRemoteDrive(driveId),
       this.listenerManager.removeDrive(driveId),
       this.cache.deleteDrive(driveId),
-      this.
+      this.legacyStorage.deleteDrive(driveId)
     ]);
     result.forEach((r) => {
       if (r.status === "rejected") {
@@ -12580,7 +12669,7 @@ var BaseDocumentDriveServer = class {
     });
   }
   getDrives() {
-    return this.
+    return this.legacyStorage.getDrives();
   }
   async getDrive(driveId, options) {
     let document;
@@ -12593,15 +12682,15 @@ var BaseDocumentDriveServer = class {
         }
       }
     } catch (e) {
-      logger.error("Error getting drive from cache", e);
+      this.logger.error("Error getting drive from cache", e);
     }
-    const driveStorage = document ?? await this.
+    const driveStorage = document ?? await this.documentStorage.get(driveId);
     const result = this._buildDocument(driveStorage, options);
     if (!isDocumentDrive(result)) {
       throw new Error(`Document with id ${driveId} is not a Document Drive`);
     } else {
       if (!options?.revisions) {
-        this.cache.setDrive(driveId, result).catch(logger.error);
+        this.cache.setDrive(driveId, result).catch(this.logger.error);
       }
       return result;
     }
@@ -12613,14 +12702,14 @@ var BaseDocumentDriveServer = class {
         return drive;
       }
     } catch (e) {
-      logger.error("Error getting drive from cache", e);
+      this.logger.error("Error getting drive from cache", e);
     }
-    const driveStorage = await this.
+    const driveStorage = await this.legacyStorage.getDriveBySlug(slug);
     const document = this._buildDocument(driveStorage, options);
     if (!isDocumentDrive(document)) {
       throw new Error(`Document with slug ${slug} is not a Document Drive`);
     } else {
-      this.cache.setDriveBySlug(slug, document).catch(logger.error);
+      this.cache.setDriveBySlug(slug, document).catch(this.logger.error);
       return document;
     }
   }
@@ -12632,17 +12721,17 @@ var BaseDocumentDriveServer = class {
       return cachedDocument;
       }
     } catch (e) {
-      logger.error("Error getting document from cache", e);
+      this.logger.error("Error getting document from cache", e);
     }
-    const documentStorage = cachedDocument ?? await this.
+    const documentStorage = cachedDocument ?? await this.documentStorage.get(documentId);
     const document = this._buildDocument(documentStorage, options);
     if (!options?.revisions) {
-      this.cache.setDocument(documentId, document).catch(logger.error);
+      this.cache.setDocument(documentId, document).catch(this.logger.error);
     }
     return document;
   }
   getDocuments(driveId) {
-    return this.
+    return this.legacyStorage.getDocuments(driveId);
   }
   async createDocument(driveId, input) {
     let state = void 0;
@@ -12665,7 +12754,7 @@ var BaseDocumentDriveServer = class {
       clipboard: [],
       state: state ?? document.state
     };
-    await this.
+    await this.legacyStorage.createDocument(driveId, input.id, documentStorage);
     for (const syncUnit of input.synchronizationUnits) {
       this.synchronizationManager.updateSyncStatus(syncUnit.syncId, {
         pull: this.triggerMap.get(driveId) ? "INITIAL_SYNC" : void 0,
@@ -12675,9 +12764,9 @@ var BaseDocumentDriveServer = class {
     const operations = Object.values(document.operations).flat();
     if (operations.length) {
       if (isDocumentDrive(document)) {
-        await this.
+        await this.legacyStorage.addDriveOperations(driveId, operations, document);
       } else {
-        await this.
+        await this.legacyStorage.addDocumentOperations(driveId, input.id, operations, document);
       }
     }
     return document;
@@ -12692,10 +12781,10 @@ var BaseDocumentDriveServer = class {
       }
       await this.listenerManager.removeSyncUnits(driveId, syncUnits);
     } catch (error) {
-      logger.warn("Error deleting document", error);
+      this.logger.warn("Error deleting document", error);
     }
     await this.cache.deleteDocument(documentId);
-    return this.
+    return this.legacyStorage.deleteDocument(driveId, documentId);
   }
   async _processOperations(driveId, documentId, documentStorage, operations) {
     const operationsApplied = [];
@@ -12746,7 +12835,7 @@ var BaseDocumentDriveServer = class {
     for (const scope of Object.keys(documentOperations)) {
       const lastRemainingOperation = documentOperations[scope].at(-1);
       if (lastRemainingOperation && !lastRemainingOperation.resultingState) {
-        lastRemainingOperation.resultingState = await (documentId ? this.
+        lastRemainingOperation.resultingState = await (documentId ? this.legacyStorage.getOperationResultingState?.(driveId, documentId, lastRemainingOperation.index, lastRemainingOperation.scope, "main") : this.legacyStorage.getDriveOperationResultingState?.(driveId, lastRemainingOperation.index, lastRemainingOperation.scope, "main"));
       }
     }
     return {
@@ -12778,7 +12867,7 @@ var BaseDocumentDriveServer = class {
       });
       const lastRemainingOperation = documentOperations[scope].at(-1);
       if (lastRemainingOperation && !lastRemainingOperation.resultingState) {
-        lastRemainingOperation.resultingState = await (documentId ? this.
+        lastRemainingOperation.resultingState = await (documentId ? this.legacyStorage.getOperationResultingState?.(driveId, documentId, lastRemainingOperation.index, lastRemainingOperation.scope, "main") : this.legacyStorage.getDriveOperationResultingState?.(driveId, lastRemainingOperation.index, lastRemainingOperation.scope, "main"));
       }
       const operationSignals = [];
       newDocument = documentModelModule.reducer(newDocument, operation, (signal) => {
@@ -12825,14 +12914,14 @@ var BaseDocumentDriveServer = class {
     return this.addOperations(driveId, documentId, [operation], options);
   }
   async _addOperations(driveId, documentId, callback) {
-    if (!this.
-      const documentStorage = await this.
+    if (!this.legacyStorage.addDocumentOperationsWithTransaction) {
+      const documentStorage = await this.documentStorage.get(documentId);
       const result = await callback(documentStorage);
       if (result.operations.length > 0) {
-        await this.
+        await this.legacyStorage.addDocumentOperations(driveId, documentId, result.operations, result.header);
       }
     } else {
-      await this.
+      await this.legacyStorage.addDocumentOperationsWithTransaction(driveId, documentId, callback);
     }
   }
   queueOperation(driveId, documentId, operation, options) {
@@ -12864,33 +12953,35 @@ var BaseDocumentDriveServer = class {
     if (result) {
       return result;
     }
+    let jobId;
+    const promise = new Promise((resolve, reject) => {
+      const unsubscribe = this.queueManager.on("jobCompleted", (job, result2) => {
+        if (job.jobId === jobId) {
+          unsubscribe();
+          unsubscribeError();
+          resolve(result2);
+        }
+      });
+      const unsubscribeError = this.queueManager.on("jobFailed", (job, error) => {
+        if (job.jobId === jobId) {
+          unsubscribe();
+          unsubscribeError();
+          reject(error);
+        }
+      });
+    });
     try {
-
+      jobId = await this.queueManager.addJob({
         driveId,
         documentId,
         operations,
         options
       });
-      return new Promise((resolve, reject) => {
-        const unsubscribe = this.queueManager.on("jobCompleted", (job, result2) => {
-          if (job.jobId === jobId) {
-            unsubscribe();
-            unsubscribeError();
-            resolve(result2);
-          }
-        });
-        const unsubscribeError = this.queueManager.on("jobFailed", (job, error) => {
-          if (job.jobId === jobId) {
-            unsubscribe();
-            unsubscribeError();
-            reject(error);
-          }
-        });
-      });
     } catch (error) {
-      logger.error("Error adding job", error);
+      this.logger.error("Error adding job", error);
       throw error;
     }
+    return promise;
   }
   async queueAction(driveId, documentId, action, options) {
     return this.queueActions(driveId, documentId, [action], options);
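
In the hunk above, the "jobCompleted"/"jobFailed" subscriptions are now created before `addJob` is awaited, so a job that completes quickly can no longer slip past the listeners. A small self-contained sketch of that ordering with a toy emitter and queue (all names here are illustrative, not the package's API):

// Toy event emitter whose on() returns an unsubscribe function, as the diff assumes.
function createEmitter() {
  const handlers = { jobCompleted: new Set() };
  return {
    on(event, handler) {
      handlers[event].add(handler);
      return () => handlers[event].delete(handler);
    },
    emit(event, ...args) {
      for (const handler of handlers[event]) handler(...args);
    }
  };
}

const queue = createEmitter();

// Subscribe first, then enqueue: the handler is already attached when the job completes.
async function enqueueAndWait(addJob) {
  let jobId;
  const promise = new Promise((resolve) => {
    const unsubscribe = queue.on("jobCompleted", (job, result) => {
      if (job.jobId === jobId) {
        unsubscribe();
        resolve(result);
      }
    });
  });
  jobId = await addJob();
  return promise;
}

enqueueAndWait(async () => {
  const id = 1;
  setTimeout(() => queue.emit("jobCompleted", { jobId: id }, "done"), 0);
  return id;
}).then(console.log); // logs "done"
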
@@ -12920,7 +13011,7 @@ var BaseDocumentDriveServer = class {
         });
       });
     } catch (error) {
-      logger.error("Error adding job", error);
+      this.logger.error("Error adding job", error);
       throw error;
     }
   }
@@ -12951,7 +13042,7 @@ var BaseDocumentDriveServer = class {
         });
       });
     } catch (error) {
-      logger.error("Error adding drive job", error);
+      this.logger.error("Error adding drive job", error);
       throw error;
     }
   }
@@ -12968,7 +13059,7 @@ var BaseDocumentDriveServer = class {
     await this._addOperations(driveId, documentId, async (documentStorage) => {
       const result2 = await this._processOperations(driveId, documentId, documentStorage, operations);
       if (!result2.document) {
-        logger.error("Invalid document");
+        this.logger.error("Invalid document");
         throw result2.error ?? new Error("Invalid document");
       }
       document = result2.document;
@@ -12982,7 +13073,7 @@ var BaseDocumentDriveServer = class {
       };
     });
     if (document) {
-      this.cache.setDocument(documentId, document).catch(logger.error);
+      this.cache.setDocument(documentId, document).catch(this.logger.error);
     }
     const { scopes, branches } = operationsApplied.reduce((acc, operation) => {
       if (!acc.scopes.includes(operation.scope)) {
@@ -13015,7 +13106,7 @@ var BaseDocumentDriveServer = class {
         });
       }
     }).catch((error2) => {
-      logger.error("Non handled error updating sync revision", error2);
+      this.logger.error("Non handled error updating sync revision", error2);
       this.synchronizationManager.updateSyncStatus(driveId, {
         [operationSource]: "ERROR"
       }, error2);
@@ -13052,18 +13143,18 @@ var BaseDocumentDriveServer = class {
     for (const drive of await this.getDrives()) {
       await this.deleteDrive(drive);
     }
-    await this.
+    await this.legacyStorage.clearStorage?.();
   }
   async _addDriveOperations(driveId, callback) {
-    if (!this.
-      const documentStorage = await this.
+    if (!this.legacyStorage.addDriveOperationsWithTransaction) {
+      const documentStorage = await this.documentStorage.get(driveId);
       const result = await callback(documentStorage);
       if (result.operations.length > 0) {
-        await this.
+        await this.legacyStorage.addDriveOperations(driveId, result.operations, result.header);
       }
       return result;
     } else {
-      return this.
+      return this.legacyStorage.addDriveOperationsWithTransaction(driveId, callback);
     }
   }
   queueDriveOperation(driveId, operation, options) {
@@ -13116,7 +13207,7 @@ var BaseDocumentDriveServer = class {
         });
       });
     } catch (error) {
-      logger.error("Error adding drive job", error);
+      this.logger.error("Error adding drive job", error);
       throw error;
     }
   }
@@ -13144,7 +13235,7 @@ var BaseDocumentDriveServer = class {
     if (!document || !isDocumentDrive(document)) {
       throw error ?? new Error("Invalid Document Drive document");
     }
-    this.cache.setDrive(driveId, document).catch(logger.error);
+    this.cache.setDrive(driveId, document).catch(this.logger.error);
     const lastOperation = operationsApplied.filter((op) => op.scope === "global").slice().pop();
     if (lastOperation) {
       const newOp = operationsApplied.find((appliedOp) => !operations.find((o) => o.id === appliedOp.id && o.index === appliedOp.index && o.skip === appliedOp.skip && o.hash === appliedOp.hash));
@@ -13171,7 +13262,7 @@ var BaseDocumentDriveServer = class {
         });
       }
     }).catch((error2) => {
-      logger.error("Non handled error updating sync revision", error2);
+      this.logger.error("Non handled error updating sync revision", error2);
       this.synchronizationManager.updateSyncStatus(driveId, {
         [operationSource]: "ERROR"
       }, error2);
@@ -13273,9 +13364,26 @@ var BaseDocumentDriveServer = class {
         scope: strand.scope,
         branch: strand.branch
       }));
-
-
-
+      let result;
+      if (strand.documentId) {
+        try {
+          result = await this.queueOperations(strand.driveId, strand.documentId, operations, {
+            source
+          });
+        } catch (error) {
+          this.logger.error("Error queueing operations", error);
+          throw error;
+        }
+      } else {
+        try {
+          result = await this.queueDriveOperations(strand.driveId, operations, {
+            source
+          });
+        } catch (error) {
+          this.logger.error("Error queueing operations", error);
+          throw error;
+        }
+      }
      if (result.status === "ERROR") {
        const syncUnits = strand.documentId !== "" ? (await this.getSynchronizationUnitsIds(strand.driveId, [strand.documentId], [strand.scope], [strand.branch])).map((s) => s.syncId) : [strand.driveId];
        const operationSource = this.getOperationSource(source);
@@ -13447,11 +13555,10 @@ var ListenerManager = class _ListenerManager {
       throw new Error("Maximum retries exhausted.");
     }
     const listenerUpdates = [];
-    for (const [driveId,
-      for (const [listenerId, listenerState] of
+    for (const [driveId, listenerStateById] of this.listenerStateByDriveId) {
+      for (const [listenerId, listenerState] of listenerStateById) {
         const transmitter = listenerState.listener.transmitter;
         if (!transmitter?.transmit) {
-          this.logger.verbose(`Transmitter not set on listener: ${listenerId}`);
           continue;
         }
         const syncUnits = await this.getListenerSyncUnits(driveId, listenerId);
@@ -13602,57 +13709,78 @@ var ListenerManager = class _ListenerManager {
     }
   }
   async getStrands(driveId, listenerId, options) {
-
+    this.logger.verbose(`[SYNC DEBUG] ListenerManager.getStrands called for drive: ${driveId}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
+    let listenerState;
+    try {
+      listenerState = this.getListenerState(driveId, listenerId);
+      this.logger.verbose(`[SYNC DEBUG] Found listener state for drive: ${driveId}, listener: ${listenerId}, status: ${listenerState.listenerStatus}`);
+    } catch (error) {
+      this.logger.error(`[SYNC DEBUG] Failed to find listener state for drive: ${driveId}, listener: ${listenerId}. Error: ${error}`);
+      throw error;
+    }
     const strands = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      {
-
-
-
+    try {
+      const syncUnits = await this.getListenerSyncUnits(driveId, listenerId);
+      this.logger.verbose(`[SYNC DEBUG] Retrieved ${syncUnits.length} sync units for drive: ${driveId}, listener: ${listenerId}`);
+      const limit = options?.limit;
+      let operationsCount = 0;
+      const tasks = syncUnits.map((syncUnit) => async () => {
+        if (limit && operationsCount >= limit) {
+          return;
+        }
+        if (syncUnit.revision < 0) {
+          this.logger.verbose(`[SYNC DEBUG] Skipping sync unit with negative revision: ${syncUnit.syncId}, revision: ${syncUnit.revision}`);
+          return;
+        }
+        const entry = listenerState.syncUnits.get(syncUnit.syncId);
+        if (entry && entry.listenerRev >= syncUnit.revision) {
+          this.logger.verbose(`[SYNC DEBUG] Skipping sync unit - listener already up to date: ${syncUnit.syncId}, listenerRev: ${entry.listenerRev}, revision: ${syncUnit.revision}`);
+          return;
+        }
+        const { documentId, scope, branch } = syncUnit;
+        try {
+          this.logger.verbose(`[SYNC DEBUG] Getting operations for syncUnit: ${syncUnit.syncId}, documentId: ${documentId}, scope: ${scope}, branch: ${branch}`);
+          const operations = await this.syncManager.getOperationData(
+            // DEAL WITH INVALID SYNC ID ERROR
+            driveId,
+            syncUnit.syncId,
+            {
+              since: options?.since,
+              fromRevision: options?.fromRevision ?? entry?.listenerRev,
+              limit: limit ? limit - operationsCount : void 0
+            }
+          );
+          this.logger.verbose(`[SYNC DEBUG] Retrieved ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
+          if (!operations.length) {
+            return;
           }
-
-
+          operationsCount += operations.length;
+          strands.push({
+            driveId,
+            documentId,
+            scope,
+            branch,
+            operations
+          });
+          this.logger.verbose(`[SYNC DEBUG] Added strand with ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
+        } catch (error) {
+          this.logger.error(`Error getting operations for syncUnit: ${syncUnit.syncId}, error: ${error}`);
           return;
         }
-
-
-
-
-
-
-
-        });
-
-        this.logger.error(error);
-        return;
-      }
-    });
-    if (this.options.sequentialUpdates) {
-      for (const task of tasks) {
-        await task();
+      });
+      if (this.options.sequentialUpdates) {
+        this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units sequentially`);
+        for (const task of tasks) {
+          await task();
+        }
+      } else {
+        this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units in parallel`);
+        await Promise.all(tasks.map((task) => task()));
       }
-    }
-
+    } catch (error) {
+      this.logger.error(`Error in getStrands: ${error}`);
     }
+    this.logger.verbose(`ListenerManager.getStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
     return strands;
   }
   getListenerState(driveId, listenerId) {
@@ -13704,36 +13832,48 @@ var TransmitterFactory = class {
 // ../../packages/document-drive/dist/src/server/sync-manager.js
 var SynchronizationManager = class {
   storage;
+  documentStorage;
   cache;
   documentModelModules;
   eventEmitter;
   syncStatus = /* @__PURE__ */ new Map();
   logger = childLogger(["SynchronizationManager"]);
-  constructor(storage, cache, documentModelModules, eventEmitter) {
+  constructor(storage, documentStorage, cache, documentModelModules, eventEmitter) {
     this.storage = storage;
+    this.documentStorage = documentStorage;
     this.cache = cache;
     this.documentModelModules = documentModelModules;
     this.eventEmitter = eventEmitter;
   }
   async getSynchronizationUnits(driveId, documentId, scope, branch, documentType2) {
     const synchronizationUnitsQuery = await this.getSynchronizationUnitsIds(driveId, documentId, scope, branch, documentType2);
+    this.logger.verbose(`getSynchronizationUnits query: ${JSON.stringify(synchronizationUnitsQuery)}`);
     return this.getSynchronizationUnitsRevision(driveId, synchronizationUnitsQuery);
   }
   async getSynchronizationUnitsRevision(driveId, syncUnitsQuery) {
     const drive = await this.getDrive(driveId);
     const revisions = await this.storage.getSynchronizationUnitsRevision(syncUnitsQuery);
-
+    this.logger.verbose(`getSynchronizationUnitsRevision: ${JSON.stringify(revisions)}`);
+    const synchronizationUnits = syncUnitsQuery.map((s) => ({
       ...s,
       lastUpdated: drive.created,
-      revision:
+      revision: -1
     }));
+    for (const revision of revisions) {
+      const syncUnit = synchronizationUnits.find((s) => revision.documentId === s.documentId && revision.scope === s.scope && revision.branch === s.branch);
+      if (syncUnit) {
+        syncUnit.revision = revision.revision;
+        syncUnit.lastUpdated = revision.lastUpdated;
+      }
+    }
+    return synchronizationUnits;
   }
   async getSynchronizationUnitsIds(driveId, documentId, scope, branch, documentType2) {
     const drive = await this.getDrive(driveId);
     const nodes = drive.state.global.nodes.filter((node) => isFileNode(node) && (!documentId?.length || documentId.includes(node.id) || documentId.includes("*")) && (!documentType2?.length || documentType2.includes(node.documentType) || documentType2.includes("*")));
     if ((!documentId || documentId.includes("*") || documentId.includes("")) && (!documentType2?.length || documentType2.includes("powerhouse/document-drive") || documentType2.includes("*"))) {
       nodes.unshift({
-        id:
+        id: driveId,
         documentType: "powerhouse/document-drive",
         synchronizationUnits: [
           {
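
In the hunk above, every queried sync unit now starts at revision -1 and is only bumped when the storage layer reports a matching (documentId, scope, branch) revision. A small standalone sketch of that merge step (the sample data and the placeholder lastUpdated default are made up for illustration; the real code uses the drive's creation date):

const syncUnitsQuery = [
  { documentId: "doc-1", scope: "global", branch: "main" },
  { documentId: "doc-2", scope: "global", branch: "main" }
];
const storedRevisions = [
  { documentId: "doc-1", scope: "global", branch: "main", revision: 4, lastUpdated: "2024-01-01T00:00:00.000Z" }
];

// Default every queried unit to "never synced".
const synchronizationUnits = syncUnitsQuery.map((s) => ({
  ...s,
  lastUpdated: "1970-01-01T00:00:00.000Z",
  revision: -1
}));

// Overlay the revisions the storage layer actually knows about.
for (const revision of storedRevisions) {
  const syncUnit = synchronizationUnits.find(
    (s) => revision.documentId === s.documentId && revision.scope === s.scope && revision.branch === s.branch
  );
  if (syncUnit) {
    syncUnit.revision = revision.revision;
    syncUnit.lastUpdated = revision.lastUpdated;
  }
}

console.log(synchronizationUnits);
// doc-1 ends at revision 4; doc-2 stays at -1 because storage has no record for it.
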
@@ -13799,14 +13939,27 @@ var SynchronizationManager = class {
     };
   }
   async getOperationData(driveId, syncId, filter) {
+    this.logger.verbose(`[SYNC DEBUG] SynchronizationManager.getOperationData called for drive: ${driveId}, syncId: ${syncId}, filter: ${JSON.stringify(filter)}`);
     const syncUnit = syncId === "0" ? { documentId: "", scope: "global" } : await this.getSynchronizationUnitIdInfo(driveId, syncId);
     if (!syncUnit) {
+      this.logger.error(`SYNC DEBUG] Invalid Sync Id ${syncId} in drive ${driveId}`);
       throw new Error(`Invalid Sync Id ${syncId} in drive ${driveId}`);
     }
+    this.logger.verbose(`[SYNC DEBUG] Found sync unit: documentId: ${syncUnit.documentId}, scope: ${syncUnit.scope}`);
     const document = syncId === "0" ? await this.getDrive(driveId) : await this.getDocument(driveId, syncUnit.documentId);
+    this.logger.verbose(`[SYNC DEBUG] Retrieved document ${syncUnit.documentId} with type: ${document.documentType}`);
     const operations = document.operations[syncUnit.scope] ?? [];
+    this.logger.verbose(`[SYNC DEBUG] Found ${operations.length} total operations in scope ${syncUnit.scope}`);
     const filteredOperations = operations.filter((operation) => Object.keys(filter).length === 0 || (filter.since === void 0 || isBefore(filter.since, operation.timestamp)) && (filter.fromRevision === void 0 || operation.index > filter.fromRevision));
+    this.logger.verbose(`[SYNC DEBUG] Filtered to ${filteredOperations.length} operations based on filter criteria` + (filter.fromRevision !== void 0 ? ` (fromRevision: ${filter.fromRevision})` : ""));
     const limitedOperations = filter.limit ? filteredOperations.slice(0, filter.limit) : filteredOperations;
+    this.logger.verbose(`[SYNC DEBUG] Returning ${limitedOperations.length} operations after applying limit`);
+    if (limitedOperations.length > 0) {
+      const firstOp = limitedOperations[0];
+      const lastOp = limitedOperations[limitedOperations.length - 1];
+      this.logger.verbose(`[SYNC DEBUG] First operation: index=${firstOp.index}, type=${firstOp.type}`);
+      this.logger.verbose(`[SYNC DEBUG] Last operation: index=${lastOp.index}, type=${lastOp.type}`);
+    }
     return limitedOperations.map((operation) => ({
       hash: operation.hash,
       index: operation.index,
@@ -13827,7 +13980,7 @@ var SynchronizationManager = class {
     } catch (e) {
       this.logger.error("Error getting drive from cache", e);
     }
-    const driveStorage = await this.
+    const driveStorage = await this.documentStorage.get(driveId);
     const result = this._buildDocument(driveStorage);
     if (!isDocumentDrive(result)) {
       throw new Error(`Document with id ${driveId} is not a Document Drive`);
@@ -13843,7 +13996,7 @@ var SynchronizationManager = class {
     } catch (e) {
       this.logger.error("Error getting document from cache", e);
     }
-    const documentStorage = await this.
+    const documentStorage = await this.documentStorage.get(documentId);
     return this._buildDocument(documentStorage);
   }
   _buildDocument(documentStorage) {
@@ -14005,7 +14158,14 @@ var ReactorBuilder = class {
       this.eventEmitter = new DefaultEventEmitter();
     }
     if (!this.synchronizationManager) {
-      this.synchronizationManager = new SynchronizationManager(
+      this.synchronizationManager = new SynchronizationManager(
+        this.storage,
+        // as we refactor, we're secretly making all the IStorage implementations also implement IDocumentStorage
+        this.storage,
+        this.cache,
+        this.documentModelModules,
+        this.eventEmitter
+      );
     }
     if (!this.listenerManager) {
       const config = {
@@ -14043,7 +14203,6 @@ export {
   isFolderNode,
   generateNodesCopy,
   generateAddNodeAction,
-  DriveNotFoundError,
   mergeOperations,
   BaseQueueManager,
   ReadDriveNotFoundError,