@powerhousedao/connect 1.0.4-dev.0 → 1.0.4-dev.1
This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/assets/{app-CYjeY4R-.css → app-CcH3qGge.css} +100 -10
- package/dist/assets/{app-DRGUnpgP.js → app-CrkKw3iM.js} +12 -10
- package/dist/assets/{app-loader-DC8DXqh9.css → app-loader-DGByWxSG.css} +36 -4
- package/dist/assets/{app-loader-q6UCPwaf.js → app-loader-UcJ4Us8H.js} +238 -119
- package/dist/assets/{ccip-BMVv2Emt.js → ccip-By_pxEiI.js} +3 -3
- package/dist/assets/{content-CVGL3ZO5.js → content-Cztr255I.js} +3 -3
- package/dist/assets/{index-CR5B5rEB.js → index-CIzDiBJQ.js} +4 -4
- package/dist/assets/{index-BsB1NVuP.js → index-DL-uHasR.js} +3 -3
- package/dist/assets/{index-Bn8-eDIz.js → index-vxDIqVse.js} +3 -3
- package/dist/assets/{main.DL5ahUYG.js → main.b7W0Jdwz.js} +1 -1
- package/dist/index.html +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-6MBHOHAA.js → chunk-4GNNWOQN.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-UDKYG6I4.js → chunk-AEE452AO.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-IC6B3767.js → chunk-N7NRLUYA.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-OYYVE7RP.js → chunk-Q5MVFG2N.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-N5UNGAA6.js → chunk-ZZIQ3KGW.js} +229 -110
- package/dist/modules/@powerhousedao/reactor-browser/context/index.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/context/read-mode.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/index.js +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActions.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActionsWithUiNodes.js +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/index.js +5 -5
- package/dist/modules/@powerhousedao/reactor-browser/reactor.js +2 -2
- package/dist/vite-envs.sh +1 -1
- package/package.json +6 -6
@@ -11915,8 +11915,18 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     this.logger.verbose(`constructor(listener: ${listener.listenerId})`);
   }
   getStrands(options) {
-    this.logger.verbose(`getStrands
-    return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options)
+    this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands called for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}, options: ${JSON.stringify(options || {})}`);
+    return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options).then((strands) => {
+      this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands returning ${strands.length} strands for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
+      if (strands.length === 0) {
+        this.logger.verbose(`[SYNC DEBUG] No strands returned for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
+      } else {
+        for (const strand of strands) {
+          this.logger.verbose(`[SYNC DEBUG] Strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, operations: ${strand.operations.length}`);
+        }
+      }
+      return strands;
+    });
   }
   disconnect() {
     return Promise.resolve();
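The change in this hunk is a small, reusable pattern: tap into a promise chain to log the resolved strands and then hand them back unchanged. A minimal sketch of the same idea, with an illustrative `tapStrands` helper and an assumed `logger` that exposes `verbose()` (neither is part of the package):

```js
// Log the resolved value of a promise-returning call, then pass it through
// so callers see exactly what they would have seen without the logging.
function tapStrands(logger, label, promise) {
  return promise.then((strands) => {
    logger.verbose(`[SYNC DEBUG] ${label} returning ${strands.length} strand(s)`);
    return strands;
  });
}

// Usage sketch: wrap the underlying manager call without changing its result.
// tapStrands(logger, "getStrands", manager.getStrands(driveId, listenerId, options));
```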
@@ -11955,7 +11965,7 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     return result.registerPullResponderListener.listenerId;
   }
   static async pullStrands(driveId, url, listenerId, options) {
-    this.staticLogger.verbose(`pullStrands
+    this.staticLogger.verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands called for drive: ${driveId}, url: ${url}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
     const result = await requestGraphql(url, gql`
       query strands($listenerId: ID!) {
         system {

@@ -11995,18 +12005,25 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
     `, { listenerId });
     const error = result.errors?.at(0);
     if (error) {
+      this.staticLogger.verbose(`[SYNC DEBUG] Error pulling strands for drive: ${driveId}, listener: ${listenerId}, error: ${JSON.stringify(error)}`);
       throw error;
     }
     if (!result.system) {
+      this.staticLogger.verbose(`[SYNC DEBUG] No system data returned when pulling strands for drive: ${driveId}, listener: ${listenerId}`);
       return [];
     }
-
+    const strands = result.system.sync.strands.map((s) => ({
       ...s,
       operations: s.operations.map((o) => ({
         ...o,
         input: JSON.parse(o.input)
       }))
     }));
+    this.staticLogger.verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
+    if (strands.length > 0) {
+      this.staticLogger.verbose(`[SYNC DEBUG] Strands being returned: ${strands.map((s) => `${s.documentId}:${s.scope}`).join(", ")}`);
+    }
+    return strands;
   }
   static async acknowledgeStrands(url, listenerId, revisions) {
     this.staticLogger.verbose(`acknowledgeStrands(url: ${url}, listener: ${listenerId})`, revisions);
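The `pullStrands` changes above bind the mapped GraphQL result to a local `strands` variable so it can be logged before being returned; as the surrounding context lines show, each operation's `input` travels as a JSON string and is parsed on the client. A rough standalone sketch of that mapping step (field names are assumed from what the bundled code accesses):

```js
// Deserialize each operation's JSON-encoded input while leaving the rest
// of the strand untouched.
function parseStrands(strands) {
  return strands.map((strand) => ({
    ...strand,
    operations: strand.operations.map((operation) => ({
      ...operation,
      input: JSON.parse(operation.input)
    }))
  }));
}
```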
@@ -12039,74 +12056,123 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
       throw new Error("Error acknowledging strands");
     }
   }
+  /**
+   * This function will only throw if `onError` throws an error (or there is
+   * an unintentionally unhandled error in the pull loop).
+   *
+   * All other errors are caught, logged, and passed to `onError`.
+   *
+   * Because of this, `onError` _may be called multiple times_.
+   */
   static async executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
     this.staticLogger.verbose(`executePull(driveId: ${driveId}), trigger:`, trigger);
+    this.staticLogger.info(`[SYNC DEBUG] PullResponderTransmitter.executePull starting for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    const { url } = trigger.data;
+    let strands;
+    let error;
     try {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      let error = void 0;
-      try {
-        const result = await onStrandUpdate(strand, {
-          type: "trigger",
-          trigger
-        });
-        if (result.error) {
-          throw result.error;
+      strands = await _PullResponderTransmitter.pullStrands(driveId, url, trigger.data.listenerId);
+    } catch (e) {
+      error = e;
+      const errors = error.response.errors;
+      for (const error2 of errors) {
+        if (error2.message === "Listener not found") {
+          this.staticLogger.verbose(`[SYNC DEBUG] Auto-registering pull responder for drive: ${driveId}`);
+          const listenerId = await _PullResponderTransmitter.registerPullResponder(trigger.driveId, url, trigger.filter);
+          trigger.data.listenerId = listenerId;
+          try {
+            strands = await _PullResponderTransmitter.pullStrands(driveId, url, listenerId);
+            this.staticLogger.verbose(`Successfully auto-registerd and pulled strands for drive: ${driveId}, listenerId: ${listenerId}`);
+          } catch (error3) {
+            this.staticLogger.error(`Could not resolve 'Listener not found' error by registering a new pull responder for drive: ${driveId}, listenerId: ${listenerId}: ${error3}`);
+            onError(error3);
+            return;
           }
-
-
-        onError(error);
-      }
-      listenerRevisions.push({
-        branch: strand.branch,
-        documentId: strand.documentId || "",
-        driveId: strand.driveId,
-        revision: operations.pop()?.index ?? -1,
-        scope: strand.scope,
-        status: error ? error instanceof OperationError ? error.status : "ERROR" : "SUCCESS",
-        error
-      });
+          break;
+        }
       }
-
-
-      this.staticLogger.
-
+    }
+    if (!strands) {
+      this.staticLogger.error(`Error pulling strands for drive, and could not auto-register: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
+      onError(error);
+      return;
+    }
+    if (!strands.length) {
+      this.staticLogger.verbose(`[SYNC DEBUG] No strands returned in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
       try {
-
-
-
-
-        success = true;
-      } catch (error) {
-        this.staticLogger.error("ACK error", error);
+        onRevisions?.([]);
+      } catch (error2) {
+        this.staticLogger.error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+        onError(error2);
       }
-
-
-
-
+      return;
+    }
+    this.staticLogger.verbose(`[SYNC DEBUG] Processing ${strands.length} strands in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    const listenerRevisions = [];
+    for (const strand of strands) {
+      const operations = strand.operations.map((op) => ({
+        ...op,
+        scope: strand.scope,
+        branch: strand.branch
+      }));
+      this.staticLogger.verbose(`[SYNC DEBUG] Processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations`);
+      let error2 = void 0;
+      try {
+        const result = await onStrandUpdate(strand, {
+          type: "trigger",
+          trigger
+        });
+        if (result.error) {
+          throw result.error;
+        }
+      } catch (e) {
+        this.staticLogger.error(`Error processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations: ${e}`);
+        error2 = e;
+        onError(error2);
       }
+      listenerRevisions.push({
+        branch: strand.branch,
+        documentId: strand.documentId || "",
+        driveId: strand.driveId,
+        revision: operations.pop()?.index ?? -1,
+        scope: strand.scope,
+        status: error2 ? error2 instanceof OperationError ? error2.status : "ERROR" : "SUCCESS",
+        error: error2
+      });
+    }
+    this.staticLogger.verbose("Processed strands...");
+    try {
+      onRevisions?.(listenerRevisions);
+    } catch (error2) {
+      this.staticLogger.error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+      onError(error2);
+    }
+    this.staticLogger.verbose(`[SYNC DEBUG] Acknowledging ${listenerRevisions.length} strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    let success = false;
+    try {
+      await _PullResponderTransmitter.acknowledgeStrands(url, trigger.data.listenerId, listenerRevisions.map((revision) => {
+        const { error: error2, ...rest } = revision;
+        return rest;
+      }));
+      success = true;
+    } catch (error2) {
+      this.staticLogger.error(`Error acknowledging strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+      onError(error2);
+    }
+    if (success) {
+      this.staticLogger.verbose(`[SYNC DEBUG] Successfully acknowledged strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+    } else {
+      this.staticLogger.error("Failed to acknowledge strands");
+    }
+    try {
       onAcknowledge?.(success);
-    } catch (
-      this.staticLogger.error(
-      onError(
+    } catch (error2) {
+      this.staticLogger.error(`Error calling onAcknowledge for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+      onError(error2);
     }
   }
   static setupPull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
-    this.staticLogger.verbose(`setupPull
+    this.staticLogger.verbose(`[SYNC DEBUG] PullResponderTransmitter.setupPull initiated for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
     const { interval } = trigger.data;
     let loopInterval = PULL_DRIVE_INTERVAL;
     if (interval) {
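The rewritten `executePull` adds a recovery path: when the initial pull fails because the remote end no longer knows the listener ("Listener not found"), it registers a new pull responder, updates the trigger, and retries once before reporting the error. Stripped of the [SYNC DEBUG] logging, the control flow is roughly the sketch below; the function names and the `response.errors` error shape follow what the bundled code reads, but treat the signatures as assumptions:

```js
// pull(listenerId) and register() stand in for pullStrands and
// registerPullResponder; both are assumed to return promises.
async function pullWithAutoRegister(pull, register, trigger) {
  try {
    return await pull(trigger.data.listenerId);
  } catch (error) {
    const errors = error?.response?.errors ?? [];
    for (const graphqlError of errors) {
      if (graphqlError.message === "Listener not found") {
        // The remote listener is gone: register a fresh one and retry once.
        const listenerId = await register();
        trigger.data.listenerId = listenerId;
        return await pull(listenerId);
      }
    }
    // Not recoverable here; the caller decides how to report the failure.
    return undefined;
  }
}
```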
@@ -12118,20 +12184,25 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
       } catch {
       }
     }
+    this.staticLogger.verbose(`[SYNC DEBUG] Pull interval set to ${loopInterval}ms for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
     let isCancelled = false;
     let timeout;
     const executeLoop = async () => {
       while (!isCancelled) {
-        this.staticLogger.verbose(
+        this.staticLogger.verbose(`[SYNC DEBUG] Starting pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
         await this.executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge);
+        this.staticLogger.verbose(`[SYNC DEBUG] Completed pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}, waiting ${loopInterval}ms for next cycle`);
         await new Promise((resolve) => {
           this.staticLogger.verbose(`Scheduling next pull in ${loopInterval} ms`);
           timeout = setTimeout(resolve, loopInterval);
         });
       }
     };
-    executeLoop().catch(
+    executeLoop().catch((error) => {
+      this.staticLogger.error(`Error in executeLoop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
+    });
     return () => {
+      this.staticLogger.verbose(`[SYNC DEBUG] Cancelling pull loop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
       isCancelled = true;
       if (timeout !== void 0) {
         clearTimeout(timeout);
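`setupPull` itself keeps a common cancellable-polling shape: an async loop that awaits each pull, sleeps with `setTimeout`, and returns a cleanup function that flips a flag and clears any pending timer. A minimal self-contained sketch of that shape, with illustrative names:

```js
// Start a polling loop and return a function that cancels it.
function startPolling(task, intervalMs, onLoopError) {
  let cancelled = false;
  let timer;
  const loop = async () => {
    while (!cancelled) {
      await task();
      // Sleep between cycles; keep the timer handle so cancel() can clear it.
      await new Promise((resolve) => {
        timer = setTimeout(resolve, intervalMs);
      });
    }
  };
  // Surface loop failures instead of leaving the rejection unhandled.
  loop().catch((error) => onLoopError?.(error));
  return () => {
    cancelled = true;
    if (timer !== undefined) {
      clearTimeout(timer);
    }
  };
}
```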
@@ -12141,15 +12212,18 @@ var PullResponderTransmitter = class _PullResponderTransmitter {
   static async createPullResponderTrigger(driveId, url, options) {
     this.staticLogger.verbose(`createPullResponderTrigger(drive: ${driveId}, url: ${url})`);
     const { pullFilter, pullInterval } = options;
-    const
+    const filter = pullFilter ?? {
       documentId: ["*"],
       documentType: ["*"],
       branch: ["*"],
       scope: ["*"]
-    }
+    };
+    const listenerId = await _PullResponderTransmitter.registerPullResponder(driveId, url, filter);
     const pullTrigger = {
       id: generateUUID(),
       type: "PullResponder",
+      driveId,
+      filter,
       data: {
         url,
         listenerId,

@@ -12473,14 +12547,19 @@ var BaseDocumentDriveServer = class {
   }
   async _initializeDrive(driveId) {
     const drive = await this.getDrive(driveId);
+    logger.verbose(`[SYNC DEBUG] Initializing drive ${driveId} with slug "${drive.state.global.slug}"`);
     await this.synchronizationManager.initializeDriveSyncStatus(driveId, drive);
     if (this.shouldSyncRemoteDrive(drive)) {
+      logger.verbose(`[SYNC DEBUG] Starting sync for remote drive ${driveId}`);
       await this.startSyncRemoteDrive(driveId);
     }
+    logger.verbose(`[SYNC DEBUG] Processing ${drive.state.local.listeners.length} listeners for drive ${driveId}`);
     for (const zodListener of drive.state.local.listeners) {
       if (zodListener.callInfo?.transmitterType === "SwitchboardPush") {
+        logger.verbose(`[SYNC DEBUG] Setting up SwitchboardPush listener ${zodListener.listenerId} for drive ${driveId}`);
         const transmitter = new SwitchboardPushTransmitter(zodListener.callInfo?.data ?? "");
-
+        logger.verbose(`[SYNC DEBUG] Created SwitchboardPush transmitter with URL: ${zodListener.callInfo?.data || "none"}`);
+        await this.listenerManager.setListener(driveId, {
           block: zodListener.block,
           driveId: drive.state.global.id,
           filter: {

@@ -12494,7 +12573,11 @@ var BaseDocumentDriveServer = class {
           system: zodListener.system,
           label: zodListener.label ?? "",
           transmitter
+        }).then(() => {
+          logger.verbose(`[SYNC DEBUG] Successfully set up listener ${zodListener.listenerId} for drive ${driveId}`);
         });
+      } else {
+        logger.error(`Skipping listener ${zodListener.listenerId} with unsupported type ${zodListener.callInfo?.transmitterType || "unknown"}`);
       }
     }
   }
@@ -13602,57 +13685,78 @@ var ListenerManager = class _ListenerManager {
     }
   }
   async getStrands(driveId, listenerId, options) {
-
+    this.logger.verbose(`[SYNC DEBUG] ListenerManager.getStrands called for drive: ${driveId}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
+    let listenerState;
+    try {
+      listenerState = this.getListenerState(driveId, listenerId);
+      this.logger.verbose(`[SYNC DEBUG] Found listener state for drive: ${driveId}, listener: ${listenerId}, status: ${listenerState.listenerStatus}`);
+    } catch (error) {
+      this.logger.error(`[SYNC DEBUG] Failed to find listener state for drive: ${driveId}, listener: ${listenerId}. Error: ${error}`);
+      throw error;
+    }
     const strands = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      {
-
-
-
+    try {
+      const syncUnits = await this.getListenerSyncUnits(driveId, listenerId);
+      this.logger.verbose(`[SYNC DEBUG] Retrieved ${syncUnits.length} sync units for drive: ${driveId}, listener: ${listenerId}`);
+      const limit = options?.limit;
+      let operationsCount = 0;
+      const tasks = syncUnits.map((syncUnit) => async () => {
+        if (limit && operationsCount >= limit) {
+          return;
+        }
+        if (syncUnit.revision < 0) {
+          this.logger.verbose(`[SYNC DEBUG] Skipping sync unit with negative revision: ${syncUnit.syncId}, revision: ${syncUnit.revision}`);
+          return;
+        }
+        const entry = listenerState.syncUnits.get(syncUnit.syncId);
+        if (entry && entry.listenerRev >= syncUnit.revision) {
+          this.logger.verbose(`[SYNC DEBUG] Skipping sync unit - listener already up to date: ${syncUnit.syncId}, listenerRev: ${entry.listenerRev}, revision: ${syncUnit.revision}`);
+          return;
+        }
+        const { documentId, scope, branch } = syncUnit;
+        try {
+          this.logger.verbose(`[SYNC DEBUG] Getting operations for syncUnit: ${syncUnit.syncId}, documentId: ${documentId}, scope: ${scope}, branch: ${branch}`);
+          const operations = await this.syncManager.getOperationData(
+            // DEAL WITH INVALID SYNC ID ERROR
+            driveId,
+            syncUnit.syncId,
+            {
+              since: options?.since,
+              fromRevision: options?.fromRevision ?? entry?.listenerRev,
+              limit: limit ? limit - operationsCount : void 0
+            }
+          );
+          this.logger.verbose(`[SYNC DEBUG] Retrieved ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
+          if (!operations.length) {
+            return;
           }
-
-
+          operationsCount += operations.length;
+          strands.push({
+            driveId,
+            documentId,
+            scope,
+            branch,
+            operations
+          });
+          this.logger.verbose(`[SYNC DEBUG] Added strand with ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
+        } catch (error) {
+          this.logger.error(`Error getting operations for syncUnit: ${syncUnit.syncId}, error: ${error}`);
           return;
         }
-
-
-
-
-
-
-
-        });
-
-        this.logger.error(error);
-        return;
-      }
-      });
-      if (this.options.sequentialUpdates) {
-        for (const task of tasks) {
-          await task();
+      });
+      if (this.options.sequentialUpdates) {
+        this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units sequentially`);
+        for (const task of tasks) {
+          await task();
+        }
+      } else {
+        this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units in parallel`);
+        await Promise.all(tasks.map((task) => task()));
       }
-      }
-
+    } catch (error) {
+      this.logger.error(`Error in getStrands: ${error}`);
     }
+    this.logger.verbose(`ListenerManager.getStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
     return strands;
   }
   getListenerState(driveId, listenerId) {
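One detail worth noting in `ListenerManager.getStrands`: the per-sync-unit work is built as an array of async thunks and then either awaited one at a time (when `sequentialUpdates` is set) or run concurrently with `Promise.all`. A reduced sketch of that dispatch choice:

```js
// Run prepared async thunks either strictly in order or concurrently,
// mirroring the sequentialUpdates switch in the hunk above.
async function runTasks(tasks, sequential) {
  if (sequential) {
    for (const task of tasks) {
      await task();
    }
  } else {
    await Promise.all(tasks.map((task) => task()));
  }
}
```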
@@ -13717,11 +13821,13 @@ var SynchronizationManager = class {
   }
   async getSynchronizationUnits(driveId, documentId, scope, branch, documentType2) {
     const synchronizationUnitsQuery = await this.getSynchronizationUnitsIds(driveId, documentId, scope, branch, documentType2);
+    this.logger.verbose(`getSynchronizationUnits query: ${JSON.stringify(synchronizationUnitsQuery)}`);
     return this.getSynchronizationUnitsRevision(driveId, synchronizationUnitsQuery);
   }
   async getSynchronizationUnitsRevision(driveId, syncUnitsQuery) {
     const drive = await this.getDrive(driveId);
     const revisions = await this.storage.getSynchronizationUnitsRevision(syncUnitsQuery);
+    this.logger.verbose(`getSynchronizationUnitsRevision: ${JSON.stringify(revisions)}`);
     return syncUnitsQuery.map((s) => ({
       ...s,
       lastUpdated: drive.created,

@@ -13799,14 +13905,27 @@ var SynchronizationManager = class {
     };
   }
   async getOperationData(driveId, syncId, filter) {
+    this.logger.verbose(`[SYNC DEBUG] SynchronizationManager.getOperationData called for drive: ${driveId}, syncId: ${syncId}, filter: ${JSON.stringify(filter)}`);
     const syncUnit = syncId === "0" ? { documentId: "", scope: "global" } : await this.getSynchronizationUnitIdInfo(driveId, syncId);
     if (!syncUnit) {
+      this.logger.error(`SYNC DEBUG] Invalid Sync Id ${syncId} in drive ${driveId}`);
       throw new Error(`Invalid Sync Id ${syncId} in drive ${driveId}`);
     }
+    this.logger.verbose(`[SYNC DEBUG] Found sync unit: documentId: ${syncUnit.documentId}, scope: ${syncUnit.scope}`);
     const document = syncId === "0" ? await this.getDrive(driveId) : await this.getDocument(driveId, syncUnit.documentId);
+    this.logger.verbose(`[SYNC DEBUG] Retrieved document ${syncUnit.documentId} with type: ${document.documentType}`);
     const operations = document.operations[syncUnit.scope] ?? [];
+    this.logger.verbose(`[SYNC DEBUG] Found ${operations.length} total operations in scope ${syncUnit.scope}`);
     const filteredOperations = operations.filter((operation) => Object.keys(filter).length === 0 || (filter.since === void 0 || isBefore(filter.since, operation.timestamp)) && (filter.fromRevision === void 0 || operation.index > filter.fromRevision));
+    this.logger.verbose(`[SYNC DEBUG] Filtered to ${filteredOperations.length} operations based on filter criteria` + (filter.fromRevision !== void 0 ? ` (fromRevision: ${filter.fromRevision})` : ""));
     const limitedOperations = filter.limit ? filteredOperations.slice(0, filter.limit) : filteredOperations;
+    this.logger.verbose(`[SYNC DEBUG] Returning ${limitedOperations.length} operations after applying limit`);
+    if (limitedOperations.length > 0) {
+      const firstOp = limitedOperations[0];
+      const lastOp = limitedOperations[limitedOperations.length - 1];
+      this.logger.verbose(`[SYNC DEBUG] First operation: index=${firstOp.index}, type=${firstOp.type}`);
+      this.logger.verbose(`[SYNC DEBUG] Last operation: index=${lastOp.index}, type=${lastOp.type}`);
+    }
     return limitedOperations.map((operation) => ({
       hash: operation.hash,
       index: operation.index,
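The added logging in `getOperationData` wraps an existing filter step: operations are kept when they are newer than `filter.since` and have an index greater than `filter.fromRevision`, and the result is then capped by `filter.limit`. A simplified restatement of that filter (the timestamp comparison is approximated here; the bundle delegates it to an `isBefore` helper):

```js
function filterOperations(operations, filter) {
  const filtered = operations.filter(
    (operation) =>
      // Keep operations strictly newer than `since`, if given.
      (filter.since === undefined ||
        new Date(filter.since).getTime() < new Date(operation.timestamp).getTime()) &&
      // Keep operations above the listener's last known revision, if given.
      (filter.fromRevision === undefined || operation.index > filter.fromRevision)
  );
  // Apply the optional cap last, preserving order.
  return filter.limit ? filtered.slice(0, filter.limit) : filtered;
}
```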
@@ -3,10 +3,10 @@ import {
   ReadModeContext,
   ReadModeContextProvider,
   useReadModeContext
-} from "../chunk-
+} from "../chunk-Q5MVFG2N.js";
 import "../chunk-OVGOA2P5.js";
 import "../chunk-P6NJ6IAQ.js";
-import "../chunk-
+import "../chunk-ZZIQ3KGW.js";
 import "../chunk-FQF4YAVC.js";
 import "../chunk-2ESYSVXG.js";
 export {

@@ -2,10 +2,10 @@ import {
   ReadModeContext,
   ReadModeContextProvider,
   useReadModeContext
-} from "../chunk-
+} from "../chunk-Q5MVFG2N.js";
 import "../chunk-OVGOA2P5.js";
 import "../chunk-P6NJ6IAQ.js";
-import "../chunk-
+import "../chunk-ZZIQ3KGW.js";
 import "../chunk-FQF4YAVC.js";
 import "../chunk-2ESYSVXG.js";
 export {

@@ -1,10 +1,10 @@
 import "../chunk-DKDARAJS.js";
 import {
   useDriveActionsWithUiNodes
-} from "../chunk-
+} from "../chunk-N7NRLUYA.js";
 import {
   useDriveActions
-} from "../chunk-
+} from "../chunk-AEE452AO.js";
 import {
   DriveContextProvider,
   useDriveContext

@@ -57,7 +57,7 @@ import {
   useUserPermissions
 } from "../chunk-OVGOA2P5.js";
 import "../chunk-P6NJ6IAQ.js";
-import "../chunk-
+import "../chunk-ZZIQ3KGW.js";
 import "../chunk-FQF4YAVC.js";
 import "../chunk-YWKVPJNL.js";
 import "../chunk-2X2M6BYG.js";

@@ -1,12 +1,12 @@
 import {
   useDriveActionsWithUiNodes
-} from "../chunk-
-import "../chunk-
+} from "../chunk-N7NRLUYA.js";
+import "../chunk-AEE452AO.js";
 import "../chunk-XBTEGV5M.js";
 import "../chunk-RGIB4DD4.js";
 import "../chunk-7OQWVUC5.js";
 import "../chunk-ZL5IPB6D.js";
-import "../chunk-
+import "../chunk-ZZIQ3KGW.js";
 import "../chunk-FQF4YAVC.js";
 import "../chunk-2ESYSVXG.js";
 export {

@@ -6,10 +6,10 @@ import "./chunk-LH2T6NLN.js";
 import "./chunk-DKDARAJS.js";
 import {
   useDriveActionsWithUiNodes
-} from "./chunk-
+} from "./chunk-N7NRLUYA.js";
 import {
   useDriveActions
-} from "./chunk-
+} from "./chunk-AEE452AO.js";
 import {
   DriveContextProvider,
   useDriveContext

@@ -65,18 +65,18 @@ import {
 import {
   createBrowserDocumentDriveServer,
   getReactorDefaultDrivesConfig
-} from "./chunk-
+} from "./chunk-4GNNWOQN.js";
 import "./chunk-U36SV333.js";
 import {
   ReadModeContext,
   ReadModeContextProvider,
   useReadModeContext
-} from "./chunk-
+} from "./chunk-Q5MVFG2N.js";
 import {
   useUserPermissions
 } from "./chunk-OVGOA2P5.js";
 import "./chunk-P6NJ6IAQ.js";
-import "./chunk-
+import "./chunk-ZZIQ3KGW.js";
 import "./chunk-FQF4YAVC.js";
 import "./chunk-YWKVPJNL.js";
 import "./chunk-2X2M6BYG.js";

@@ -1,8 +1,8 @@
 import {
   createBrowserDocumentDriveServer,
   getReactorDefaultDrivesConfig
-} from "./chunk-
-import "./chunk-
+} from "./chunk-4GNNWOQN.js";
+import "./chunk-ZZIQ3KGW.js";
 import "./chunk-FQF4YAVC.js";
 import "./chunk-2ESYSVXG.js";
 export {