@powerhousedao/connect 1.0.9-dev.0 → 1.0.10-dev.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +661 -0
- package/dist/assets/{app-D9QJBr8i.css → app-BIIVKAZr.css} +223 -60
- package/dist/assets/{app-Uu65do7x.js → app-CsiwsM42.js} +2107 -1027
- package/dist/assets/{app-loader-BnxLpX27.js → app-loader-C7A2YjX4.js} +365 -191
- package/dist/assets/{app-loader-Bnp0H-wa.css → app-loader-pcztQTL4.css} +173 -26
- package/dist/assets/{ccip-CpD5P3bc.js → ccip-BG1d6viz.js} +3 -3
- package/dist/assets/{content-DV_bRXbH.js → content-0UFgs2d1.js} +37 -7
- package/dist/assets/{index-B4m3ucR9.js → index-BMDqhr-9.js} +3 -3
- package/dist/assets/{index-Ch_NXi_f.js → index-CTEGX1We.js} +670 -519
- package/dist/assets/{index-yrCJMRn6.js → index-yr0-SqYf.js} +4 -4
- package/dist/assets/{main.CpORyZoE.js → main.BmcV9296.js} +1 -1
- package/dist/assets/{style-D4JhTt_m.css → style-Ce3V83BE.css} +31 -36
- package/dist/hmr.js +1 -1
- package/dist/index.html +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-G6LMXRY5.js → chunk-2ONJ2PX4.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-OYYVE7RP.js → chunk-3C54663M.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-UDKYG6I4.js → chunk-5QJXNK35.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-FW7N6EJH.js → chunk-C7QRY43M.js} +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-45DCPCA7.js → chunk-CO2RVWYY.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-6MBHOHAA.js → chunk-ISDEPHKP.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-F3RCGUF6.js → chunk-NHD6VUCD.js} +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-N5UNGAA6.js → chunk-SQ5HIKYV.js} +346 -172
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-M2UUQ5LH.js → chunk-U34SEKEB.js} +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/{chunk-IC6B3767.js → chunk-XV42KZK3.js} +1 -1
- package/dist/modules/@powerhousedao/reactor-browser/context/index.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/context/read-mode.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/index.js +8 -8
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useAddDebouncedOperations.js +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useConnectCrypto.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDocumentDrives.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDocumentEditor.js +5 -5
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActions.js +2 -2
- package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActionsWithUiNodes.js +3 -3
- package/dist/modules/@powerhousedao/reactor-browser/index.js +10 -10
- package/dist/modules/@powerhousedao/reactor-browser/reactor.js +2 -2
- package/dist/vite-envs.sh +1 -1
- package/package.json +9 -9
@@ -1,4 +1,4 @@
-const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/app-
+const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/app-CsiwsM42.js","assets/main.BmcV9296.js","assets/app-BIIVKAZr.css"])))=>i.map(i=>d[i]);
 var __defProp = Object.defineProperty;
 var __typeError = (msg) => {
 throw TypeError(msg);
@@ -11,7 +11,7 @@ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot
 var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
 var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
 var _tags, _levelString, _errorHandler, _ConsoleLogger_instances, levelValue_get, _getDocumentModelModule, _drives, _ReadModeService_instances, parseGraphQLErrors_fn, fetchDrive_fn, _a, _ServiceWorkerManager_instances, handleServiceWorkerMessage_fn, handleServiceWorker_fn;
-import { _ as __vitePreload } from "./main.
+import { _ as __vitePreload } from "./main.BmcV9296.js";
 import { jsx, jsxs } from "react/jsx-runtime";
 import { useState, useEffect, Suspense, lazy } from "react";
 function _mergeNamespaces(n, m) {
@@ -22517,7 +22517,14 @@ class DefaultDrivesManager {
 }
 const PULL_DRIVE_INTERVAL = 1500;
 const MAX_REVISIONS_PER_ACK = 100;
-
+let _staticLogger;
+const staticLogger = () => {
+if (!_staticLogger) {
+_staticLogger = childLogger(["PullResponderTransmitter", "static"]);
+}
+return _staticLogger;
+};
+class PullResponderTransmitter {
 constructor(listener, manager) {
 __publicField(this, "logger", childLogger([
 "PullResponderTransmitter",
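The hunk above swaps the transmitter's eagerly-built static logger (the `__publicField(_PullResponderTransmitter, "staticLogger", ...)` removed later in this diff) for a lazily created module-level singleton. A minimal sketch of that pattern, with an illustrative `childLogger` stand-in rather than the bundle's real factory:

```ts
// Minimal sketch, not the package's real logger implementation.
type Logger = {
  verbose: (...args: unknown[]) => void;
  error: (...args: unknown[]) => void;
};

// Stand-in for the bundle's childLogger(tags) factory (hypothetical implementation).
function childLogger(tags: string[]): Logger {
  const prefix = tags.join(" > ");
  return {
    verbose: (...args) => console.debug(`[${prefix}]`, ...args),
    error: (...args) => console.error(`[${prefix}]`, ...args),
  };
}

// Lazy singleton: built on first use, then shared by every call site.
let _staticLogger: Logger | undefined;
function staticLogger(): Logger {
  if (!_staticLogger) {
    _staticLogger = childLogger(["PullResponderTransmitter", "static"]);
  }
  return _staticLogger;
}

staticLogger().verbose("first call builds the logger");
staticLogger().verbose("later calls reuse the same instance");
```

The practical difference is that nothing is constructed at module-load time, and the class body no longer needs the `_PullResponderTransmitter` alias that the old static-field form required.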
@@ -22530,8 +22537,18 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 this.logger.verbose(`constructor(listener: ${listener.listenerId})`);
 }
 getStrands(options) {
-this.logger.verbose(`getStrands
-return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options)
+this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands called for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}, options: ${JSON.stringify(options || {})}`);
+return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options).then((strands) => {
+this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands returning ${strands.length} strands for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
+if (strands.length === 0) {
+this.logger.verbose(`[SYNC DEBUG] No strands returned for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
+} else {
+for (const strand of strands) {
+this.logger.verbose(`[SYNC DEBUG] Strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, operations: ${strand.operations.length}`);
+}
+}
+return strands;
+});
 }
 disconnect() {
 return Promise.resolve();
@@ -22551,16 +22568,22 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 }
 return success;
 }
-static async registerPullResponder(driveId, url, filter) {
+static async registerPullResponder(driveId, url, filter, listenerId) {
 var _a2;
-
+staticLogger().verbose(`registerPullResponder(url: ${url})`, filter);
 const result = await requestGraphql(url, gql`
-mutation registerPullResponderListener(
-
+mutation registerPullResponderListener(
+$filter: InputListenerFilter!
+$listenerId: String
+) {
+registerPullResponderListener(
+filter: $filter
+listenerId: $listenerId
+) {
 listenerId
 }
 }
-`, { filter });
+`, { filter, listenerId });
 const error = (_a2 = result.errors) == null ? void 0 : _a2.at(0);
 if (error) {
 throw error;
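With the extra `listenerId` parameter, `registerPullResponderListener` can be called idempotently: passing a previously issued id re-registers the same listener instead of minting a new one. A hedged sketch of that call shape using plain `fetch` in place of the bundle's `requestGraphql` helper — the mutation, variable, and field names come from the hunk above, everything else is an assumption:

```ts
type ListenerFilter = {
  documentId: string[];
  documentType: string[];
  branch: string[];
  scope: string[];
};

const REGISTER_MUTATION = /* GraphQL */ `
  mutation registerPullResponderListener($filter: InputListenerFilter!, $listenerId: String) {
    registerPullResponderListener(filter: $filter, listenerId: $listenerId) {
      listenerId
    }
  }
`;

// Sketch: POST the mutation; when listenerId is omitted the server assigns one,
// when it is provided the call effectively re-registers the existing listener.
async function registerPullResponder(
  url: string,
  filter: ListenerFilter,
  listenerId?: string,
): Promise<string> {
  const res = await fetch(url, {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({ query: REGISTER_MUTATION, variables: { filter, listenerId } }),
  });
  const { data, errors } = (await res.json()) as {
    data?: { registerPullResponderListener?: { listenerId: string } };
    errors?: { message: string }[];
  };
  if (errors?.length) throw new Error(errors[0].message);
  if (!data?.registerPullResponderListener) throw new Error("Empty response");
  return data.registerPullResponderListener.listenerId;
}
```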
@@ -22572,7 +22595,7 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 }
 static async pullStrands(driveId, url, listenerId, options) {
 var _a2;
-
+staticLogger().verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands called for drive: ${driveId}, url: ${url}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
 const result = await requestGraphql(url, gql`
 query strands($listenerId: ID!) {
 system {
@@ -22612,27 +22635,34 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 `, { listenerId });
 const error = (_a2 = result.errors) == null ? void 0 : _a2.at(0);
 if (error) {
+staticLogger().verbose(`[SYNC DEBUG] Error pulling strands for drive: ${driveId}, listener: ${listenerId}, error: ${JSON.stringify(error)}`);
 throw error;
 }
 if (!result.system) {
+staticLogger().verbose(`[SYNC DEBUG] No system data returned when pulling strands for drive: ${driveId}, listener: ${listenerId}`);
 return [];
 }
-
+const strands = result.system.sync.strands.map((s) => ({
 ...s,
 operations: s.operations.map((o) => ({
 ...o,
 input: JSON.parse(o.input)
 }))
 }));
+staticLogger().verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
+if (strands.length > 0) {
+staticLogger().verbose(`[SYNC DEBUG] Strands being returned: ${strands.map((s) => `${s.documentId}:${s.scope}`).join(", ")}`);
+}
+return strands;
 }
 static async acknowledgeStrands(url, listenerId, revisions) {
-
+staticLogger().verbose(`acknowledgeStrands(url: ${url}, listener: ${listenerId})`, revisions);
 const chunks = [];
 for (let i = 0; i < revisions.length; i += MAX_REVISIONS_PER_ACK) {
 chunks.push(revisions.slice(i, i + MAX_REVISIONS_PER_ACK));
 }
 if (chunks.length > 1) {
-
+staticLogger().verbose(`Breaking strand acknowledgement into ${chunks.length} chunks...`);
 }
 const results = await Promise.allSettled(chunks.map(async (chunk) => {
 var _a2;
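For context, `acknowledgeStrands` above splits the revisions into `MAX_REVISIONS_PER_ACK`-sized chunks and settles each request independently. The same chunk-and-settle pattern in isolation, with illustrative names only:

```ts
const MAX_ITEMS_PER_CALL = 100; // mirrors MAX_REVISIONS_PER_ACK in the bundle

function chunk<T>(items: T[], size: number): T[][] {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
}

// Send every chunk, tolerate individual failures, and report overall success
// only if all chunks were acknowledged.
async function acknowledgeInChunks<T>(
  items: T[],
  send: (batch: T[]) => Promise<void>,
): Promise<boolean> {
  const results = await Promise.allSettled(chunk(items, MAX_ITEMS_PER_CALL).map((batch) => send(batch)));
  return results.every((r) => r.status === "fulfilled");
}

// Usage sketch:
// const ok = await acknowledgeInChunks(revisions, (batch) => postAck(url, listenerId, batch));
```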
@@ -22657,75 +22687,125 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 throw new Error("Error acknowledging strands");
 }
 }
+/**
+* This function will only throw if `onError` throws an error (or there is
+* an unintentionally unhandled error in the pull loop).
+*
+* All other errors are caught, logged, and passed to `onError`.
+*
+* Because of this, `onError` _may be called multiple times_.
+*/
 static async executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
-var _a2;
-
+var _a2, _b;
+staticLogger().verbose(`executePull(driveId: ${driveId}), trigger:`, trigger);
+staticLogger().info(`[SYNC DEBUG] PullResponderTransmitter.executePull starting for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+const { url } = trigger.data;
+let strands;
+let error;
+const listenerId = trigger.data.listenerId;
 try {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-let error = void 0;
-try {
-const result = await onStrandUpdate(strand, {
-type: "trigger",
-trigger
-});
-if (result.error) {
-throw result.error;
+strands = await PullResponderTransmitter.pullStrands(driveId, url, listenerId);
+} catch (e) {
+error = e;
+const graphqlError = error;
+const errors = ((_a2 = graphqlError.response) == null ? void 0 : _a2.errors) ?? [];
+for (const err of errors) {
+if (err.message === "Listener not found") {
+staticLogger().verbose(`[SYNC DEBUG] Auto-registering pull responder for drive: ${driveId}`);
+await PullResponderTransmitter.registerPullResponder(trigger.driveId, url, trigger.filter, listenerId);
+try {
+strands = await PullResponderTransmitter.pullStrands(driveId, url, listenerId);
+staticLogger().verbose(`Successfully auto-registered and pulled strands for drive: ${driveId}, listenerId: ${listenerId}`);
+} catch (error2) {
+staticLogger().error(`Could not resolve 'Listener not found' error by registering a new pull responder for drive: ${driveId}, listenerId: ${listenerId}: ${error2}`);
+onError(error2);
+return;
 }
-
-error = e;
-onError(error);
+break;
 }
-listenerRevisions.push({
-branch: strand.branch,
-documentId: strand.documentId || "",
-driveId: strand.driveId,
-revision: ((_a2 = operations.pop()) == null ? void 0 : _a2.index) ?? -1,
-scope: strand.scope,
-status: error ? error instanceof OperationError ? error.status : "ERROR" : "SUCCESS",
-error
-});
 }
-
-
-
-
+}
+if (!strands) {
+staticLogger().error(`Error pulling strands for drive, and could not auto-register: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
+onError(error);
+return;
+}
+if (!strands.length) {
+staticLogger().verbose(`[SYNC DEBUG] No strands returned in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
 try {
-
-
-
-
-success = true;
-} catch (error) {
-this.staticLogger.error("ACK error", error);
-}
-if (success) {
-this.staticLogger.verbose("Acknowledged strands successfully.");
-} else {
-this.staticLogger.error("Failed to acknowledge strands");
+onRevisions == null ? void 0 : onRevisions([]);
+} catch (error2) {
+staticLogger().error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+onError(error2);
 }
+return;
+}
+staticLogger().verbose(`[SYNC DEBUG] Processing ${strands.length} strands in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+const listenerRevisions = [];
+for (const strand of strands) {
+const operations = strand.operations.map((op) => ({
+...op,
+scope: strand.scope,
+branch: strand.branch
+}));
+staticLogger().verbose(`[SYNC DEBUG] Processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations`);
+let error2 = void 0;
+try {
+const result = await onStrandUpdate(strand, {
+type: "trigger",
+trigger
+});
+if (result.error) {
+throw result.error;
+}
+} catch (e) {
+staticLogger().error(`Error processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations: ${e}`);
+error2 = e;
+onError(error2);
+}
+listenerRevisions.push({
+branch: strand.branch,
+documentId: strand.documentId || "",
+driveId: strand.driveId,
+revision: ((_b = operations.pop()) == null ? void 0 : _b.index) ?? -1,
+scope: strand.scope,
+status: error2 ? error2 instanceof OperationError ? error2.status : "ERROR" : "SUCCESS",
+error: error2
+});
+}
+staticLogger().verbose("Processed strands...");
+try {
+onRevisions == null ? void 0 : onRevisions(listenerRevisions);
+} catch (error2) {
+staticLogger().error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+onError(error2);
+}
+staticLogger().verbose(`[SYNC DEBUG] Acknowledging ${listenerRevisions.length} strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+let success = false;
+try {
+await PullResponderTransmitter.acknowledgeStrands(url, trigger.data.listenerId, listenerRevisions.map((revision) => {
+const { error: error2, ...rest } = revision;
+return rest;
+}));
+success = true;
+} catch (error2) {
+staticLogger().error(`Error acknowledging strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+onError(error2);
+}
+if (success) {
+staticLogger().verbose(`[SYNC DEBUG] Successfully acknowledged strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
+} else {
+staticLogger().error("Failed to acknowledge strands");
+}
+try {
 onAcknowledge == null ? void 0 : onAcknowledge(success);
-} catch (
-
-onError(
+} catch (error2) {
+staticLogger().error(`Error calling onAcknowledge for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
+onError(error2);
 }
 }
 static setupPull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
-
+staticLogger().verbose(`[SYNC DEBUG] PullResponderTransmitter.setupPull initiated for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
 const { interval } = trigger.data;
 let loopInterval = PULL_DRIVE_INTERVAL;
 if (interval) {
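The rewritten `executePull` above no longer fails the whole pull cycle on a `Listener not found` response: it re-registers the pull responder under the original `listenerId`, retries the pull once, and only reports through `onError` if that also fails. A condensed sketch of that recover-and-retry flow, where `pull` and `register` are placeholders for the transmitter's static methods:

```ts
type Strand = { documentId: string; scope: string; operations: unknown[] };
type GraphQLErrorLike = { response?: { errors?: { message: string }[] } };

// Sketch of the recover-and-retry flow; `pull` and `register` stand in for the
// bundle's PullResponderTransmitter.pullStrands / registerPullResponder.
async function pullWithAutoRegister(
  pull: () => Promise<Strand[]>,
  register: () => Promise<void>,
  onError: (e: unknown) => void,
): Promise<Strand[] | undefined> {
  try {
    return await pull();
  } catch (e) {
    const errors = (e as GraphQLErrorLike).response?.errors ?? [];
    if (errors.some((err) => err.message === "Listener not found")) {
      // The remote no longer knows this listener (e.g. it was reset):
      // re-register under the same id, then retry the pull exactly once.
      await register();
      try {
        return await pull();
      } catch (retryError) {
        onError(retryError);
        return undefined;
      }
    }
    onError(e); // any other failure is reported, not rethrown
    return undefined;
  }
}
```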
@@ -22737,20 +22817,25 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 } catch {
 }
 }
+staticLogger().verbose(`[SYNC DEBUG] Pull interval set to ${loopInterval}ms for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
 let isCancelled = false;
 let timeout;
 const executeLoop = async () => {
 while (!isCancelled) {
-
+staticLogger().verbose(`[SYNC DEBUG] Starting pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
 await this.executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge);
+staticLogger().verbose(`[SYNC DEBUG] Completed pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}, waiting ${loopInterval}ms for next cycle`);
 await new Promise((resolve) => {
-
+staticLogger().verbose(`Scheduling next pull in ${loopInterval} ms`);
 timeout = setTimeout(resolve, loopInterval);
 });
 }
 };
-executeLoop().catch(
+executeLoop().catch((error) => {
+staticLogger().error(`Error in executeLoop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
+});
 return () => {
+staticLogger().verbose(`[SYNC DEBUG] Cancelling pull loop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
 isCancelled = true;
 if (timeout !== void 0) {
 clearTimeout(timeout);
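`setupPull` drives `executePull` from a cancellable `setTimeout` loop (now with per-cycle logging). The essentials of that loop, reduced to an illustrative helper:

```ts
// A polling loop that runs `tick` repeatedly, waits `intervalMs` between cycles,
// and can be stopped via the returned cancel function.
function startPolling(tick: () => Promise<void>, intervalMs: number): () => void {
  let isCancelled = false;
  let timeout: ReturnType<typeof setTimeout> | undefined;

  const loop = async () => {
    while (!isCancelled) {
      await tick(); // errors inside tick should be handled by tick itself
      await new Promise<void>((resolve) => {
        timeout = setTimeout(resolve, intervalMs);
      });
    }
  };

  loop().catch((error) => console.error("polling loop crashed", error));

  return () => {
    isCancelled = true;
    if (timeout !== undefined) clearTimeout(timeout); // prevent the next cycle from firing
  };
}

// Usage sketch: const stop = startPolling(() => executePullOnce(), 1500);
```

Calling the returned function flips `isCancelled` and clears any pending timer, so no further cycles start.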
@@ -22758,17 +22843,20 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 };
 }
 static async createPullResponderTrigger(driveId, url, options) {
-
+staticLogger().verbose(`createPullResponderTrigger(drive: ${driveId}, url: ${url})`);
 const { pullFilter, pullInterval } = options;
-const
+const filter = pullFilter ?? {
 documentId: ["*"],
 documentType: ["*"],
 branch: ["*"],
 scope: ["*"]
-}
+};
+const listenerId = await PullResponderTransmitter.registerPullResponder(driveId, url, filter);
 const pullTrigger = {
 id: generateUUID(),
 type: "PullResponder",
+driveId,
+filter,
 data: {
 url,
 listenerId,
@@ -22780,12 +22868,7 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
 static isPullResponderTrigger(trigger) {
 return trigger.type === "PullResponder";
 }
-}
-__publicField(_PullResponderTransmitter, "staticLogger", childLogger([
-"PullResponderTransmitter",
-"static"
-]));
-let PullResponderTransmitter = _PullResponderTransmitter;
+}
 var defaults;
 var hasRequiredDefaults;
 function requireDefaults() {
@@ -23002,6 +23085,7 @@ function isAtRevision(document, revisions) {
 }
 class BaseDocumentDriveServer {
 constructor(documentModelModules, storage, documentStorage, cache, queueManager, eventEmitter, synchronizationManager, listenerManager, options) {
+__publicField(this, "logger", childLogger(["BaseDocumentDriveServer"]));
 // external dependencies
 __publicField(this, "documentModelModules");
 __publicField(this, "storage");
@@ -23072,19 +23156,19 @@ class BaseDocumentDriveServer {
 async _initialize() {
 await this.listenerManager.initialize(this.handleListenerError);
 await this.queueManager.init(this.queueDelegate, (error) => {
-logger
+this.logger.error(`Error initializing queue manager`, error);
 errors.push(error);
 });
 try {
 await this.defaultDrivesManager.removeOldremoteDrives();
 } catch (error) {
-logger
+this.logger.error(error);
 }
 const errors = [];
 const drives = await this.getDrives();
 for (const drive of drives) {
 await this._initializeDrive(drive).catch((error) => {
-logger
+this.logger.error(`Error initializing drive ${drive}`, error);
 errors.push(error);
 });
 }
@@ -23114,7 +23198,7 @@ class BaseDocumentDriveServer {
 return source.type === "local" ? "push" : "pull";
 }
 handleListenerError(error, driveId, listener) {
-logger
+this.logger.error(`Listener ${listener.listener.label ?? listener.listener.listenerId} error:`, error);
 const status = error instanceof OperationError ? error.status : "ERROR";
 this.synchronizationManager.updateSyncStatus(driveId, { push: status }, error);
 }
@@ -23177,9 +23261,9 @@ class BaseDocumentDriveServer {
 if (pushListener) {
 this.getSynchronizationUnitsRevision(driveId, syncUnits).then((syncUnitRevisions) => {
 for (const revision of syncUnitRevisions) {
-this.listenerManager.updateListenerRevision(pushListener.listenerId, driveId, revision.syncId, revision.revision).catch(logger
+this.listenerManager.updateListenerRevision(pushListener.listenerId, driveId, revision.syncId, revision.revision).catch(this.logger.error);
 }
-}).catch(logger
+}).catch(this.logger.error);
 }
 }
 });
@@ -23200,30 +23284,57 @@ class BaseDocumentDriveServer {
 return this.triggerMap.delete(driveId);
 }
 async _initializeDrive(driveId) {
-var _a2, _b, _c, _d, _e, _f;
+var _a2, _b, _c, _d, _e, _f, _g, _h, _i;
 const drive = await this.getDrive(driveId);
+this.logger.verbose(`[SYNC DEBUG] Initializing drive ${driveId} with slug "${drive.state.global.slug}"`);
 await this.synchronizationManager.initializeDriveSyncStatus(driveId, drive);
 if (this.shouldSyncRemoteDrive(drive)) {
+this.logger.verbose(`[SYNC DEBUG] Starting sync for remote drive ${driveId}`);
 await this.startSyncRemoteDrive(driveId);
 }
+this.logger.verbose(`[SYNC DEBUG] Processing ${drive.state.local.listeners.length} listeners for drive ${driveId}`);
 for (const zodListener of drive.state.local.listeners) {
 if (((_a2 = zodListener.callInfo) == null ? void 0 : _a2.transmitterType) === "SwitchboardPush") {
+this.logger.verbose(`[SYNC DEBUG] Setting up SwitchboardPush listener ${zodListener.listenerId} for drive ${driveId}`);
 const transmitter = new SwitchboardPushTransmitter(((_b = zodListener.callInfo) == null ? void 0 : _b.data) ?? "");
-this.
+this.logger.verbose(`[SYNC DEBUG] Created SwitchboardPush transmitter with URL: ${((_c = zodListener.callInfo) == null ? void 0 : _c.data) || "none"}`);
+await this.listenerManager.setListener(driveId, {
 block: zodListener.block,
 driveId: drive.state.global.id,
 filter: {
-branch: ((
-documentId: ((
-documentType: ((
-scope: ((
+branch: ((_d = zodListener.filter) == null ? void 0 : _d.branch) ?? [],
+documentId: ((_e = zodListener.filter) == null ? void 0 : _e.documentId) ?? [],
+documentType: ((_f = zodListener.filter) == null ? void 0 : _f.documentType) ?? [],
+scope: ((_g = zodListener.filter) == null ? void 0 : _g.scope) ?? []
 },
 listenerId: zodListener.listenerId,
 callInfo: zodListener.callInfo,
 system: zodListener.system,
 label: zodListener.label ?? "",
 transmitter
+}).then(() => {
+this.logger.verbose(`[SYNC DEBUG] Successfully set up listener ${zodListener.listenerId} for drive ${driveId}`);
 });
+} else if (((_h = zodListener.callInfo) == null ? void 0 : _h.transmitterType) === "PullResponder") {
+this.logger.verbose(`[SYNC DEBUG] Setting up PullResponder listener ${zodListener.listenerId} for drive ${driveId}`);
+const pullResponderListener = {
+driveId,
+listenerId: zodListener.listenerId,
+block: false,
+filter: zodListener.filter,
+system: false,
+label: `PullResponder #${zodListener.listenerId}`,
+callInfo: {
+data: "",
+name: "PullResponder",
+transmitterType: "PullResponder"
+}
+};
+const pullResponder = new PullResponderTransmitter(pullResponderListener, this.listenerManager);
+pullResponderListener.transmitter = pullResponder;
+await this.listenerManager.setListener(driveId, pullResponderListener);
+} else {
+this.logger.error(`Skipping listener ${zodListener.listenerId} with unsupported type ${((_i = zodListener.callInfo) == null ? void 0 : _i.transmitterType) || "unknown"}`);
 }
 }
 }
@@ -23322,7 +23433,7 @@ class BaseDocumentDriveServer {
 }
 }
 } catch (e) {
-logger
+this.logger.error("Error getting drive from cache", e);
 }
 const driveStorage = document ?? await this.storage.getDrive(driveId);
 const result = this._buildDocument(driveStorage, options);
@@ -23330,7 +23441,7 @@ class BaseDocumentDriveServer {
 throw new Error(`Document with id ${driveId} is not a Document Drive`);
 } else {
 if (!(options == null ? void 0 : options.revisions)) {
-this.cache.setDrive(driveId, result).catch(logger
+this.cache.setDrive(driveId, result).catch(this.logger.error);
 }
 return result;
 }
@@ -23342,14 +23453,14 @@ class BaseDocumentDriveServer {
 return drive;
 }
 } catch (e) {
-logger
+this.logger.error("Error getting drive from cache", e);
 }
 const driveStorage = await this.storage.getDriveBySlug(slug);
 const document = this._buildDocument(driveStorage, options);
 if (!isDocumentDrive(document)) {
 throw new Error(`Document with slug ${slug} is not a Document Drive`);
 } else {
-this.cache.setDriveBySlug(slug, document).catch(logger
+this.cache.setDriveBySlug(slug, document).catch(this.logger.error);
 return document;
 }
 }
@@ -23361,12 +23472,12 @@ class BaseDocumentDriveServer {
 return cachedDocument;
 }
 } catch (e) {
-logger
+this.logger.error("Error getting document from cache", e);
 }
 const documentStorage = cachedDocument ?? await this.storage.getDocument(driveId, documentId);
 const document = this._buildDocument(documentStorage, options);
 if (!(options == null ? void 0 : options.revisions)) {
-this.cache.setDocument(documentId, document).catch(logger
+this.cache.setDocument(documentId, document).catch(this.logger.error);
 }
 return document;
 }
@@ -23421,7 +23532,7 @@ class BaseDocumentDriveServer {
 }
 await this.listenerManager.removeSyncUnits(driveId, syncUnits);
 } catch (error) {
-logger
+this.logger.warn("Error deleting document", error);
 }
 await this.cache.deleteDocument(documentId);
 return this.storage.deleteDocument(driveId, documentId);
@@ -23595,33 +23706,35 @@ class BaseDocumentDriveServer {
 if (result) {
 return result;
 }
+let jobId;
+const promise = new Promise((resolve, reject) => {
+const unsubscribe = this.queueManager.on("jobCompleted", (job, result2) => {
+if (job.jobId === jobId) {
+unsubscribe();
+unsubscribeError();
+resolve(result2);
+}
+});
+const unsubscribeError = this.queueManager.on("jobFailed", (job, error) => {
+if (job.jobId === jobId) {
+unsubscribe();
+unsubscribeError();
+reject(error);
+}
+});
+});
 try {
-
+jobId = await this.queueManager.addJob({
 driveId,
 documentId,
 operations,
 options
 });
-return new Promise((resolve, reject) => {
-const unsubscribe = this.queueManager.on("jobCompleted", (job, result2) => {
-if (job.jobId === jobId) {
-unsubscribe();
-unsubscribeError();
-resolve(result2);
-}
-});
-const unsubscribeError = this.queueManager.on("jobFailed", (job, error) => {
-if (job.jobId === jobId) {
-unsubscribe();
-unsubscribeError();
-reject(error);
-}
-});
-});
 } catch (error) {
-logger
+this.logger.error("Error adding job", error);
 throw error;
 }
+return promise;
 }
 async queueAction(driveId, documentId, action, options) {
 return this.queueActions(driveId, documentId, [action], options);
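The `queueOperations` change above reorders things so the `jobCompleted` / `jobFailed` subscriptions exist before the job is enqueued, and the awaited promise is returned after the `try`/`catch`. A self-contained sketch of that subscribe-first ordering against a minimal, hypothetical queue interface:

```ts
// Hypothetical shape of the queue manager's event API; not the real interface.
type QueueEvents<TResult> = {
  onJobCompleted(handler: (jobId: string, result: TResult) => void): () => void;
  onJobFailed(handler: (jobId: string, error: unknown) => void): () => void;
  addJob(): Promise<string>; // resolves with the id of the enqueued job
};

// Subscribe to completion/failure *before* enqueueing, so a job that finishes
// quickly cannot complete between addJob() resolving and the listeners attaching.
function enqueueAndWait<TResult>(queue: QueueEvents<TResult>): Promise<TResult> {
  let jobId: string | undefined;
  const promise = new Promise<TResult>((resolve, reject) => {
    const offCompleted = queue.onJobCompleted((id, result) => {
      if (id === jobId) {
        offCompleted();
        offFailed();
        resolve(result);
      }
    });
    const offFailed = queue.onJobFailed((id, error) => {
      if (id === jobId) {
        offCompleted();
        offFailed();
        reject(error);
      }
    });
  });
  // Enqueue only after the listeners are in place.
  return queue.addJob().then((id) => {
    jobId = id;
    return promise;
  });
}
```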
@@ -23651,7 +23764,7 @@ class BaseDocumentDriveServer {
 });
 });
 } catch (error) {
-logger
+this.logger.error("Error adding job", error);
 throw error;
 }
 }
@@ -23682,7 +23795,7 @@ class BaseDocumentDriveServer {
 });
 });
 } catch (error) {
-logger
+this.logger.error("Error adding drive job", error);
 throw error;
 }
 }
@@ -23699,7 +23812,7 @@ class BaseDocumentDriveServer {
 await this._addOperations(driveId, documentId, async (documentStorage) => {
 const result2 = await this._processOperations(driveId, documentId, documentStorage, operations);
 if (!result2.document) {
-logger
+this.logger.error("Invalid document");
 throw result2.error ?? new Error("Invalid document");
 }
 document = result2.document;
@@ -23713,7 +23826,7 @@ class BaseDocumentDriveServer {
 };
 });
 if (document) {
-this.cache.setDocument(documentId, document).catch(logger
+this.cache.setDocument(documentId, document).catch(this.logger.error);
 }
 const { scopes, branches } = operationsApplied.reduce((acc, operation) => {
 if (!acc.scopes.includes(operation.scope)) {
@@ -23746,7 +23859,7 @@ class BaseDocumentDriveServer {
 });
 }
 }).catch((error2) => {
-logger
+this.logger.error("Non handled error updating sync revision", error2);
 this.synchronizationManager.updateSyncStatus(driveId, {
 [operationSource]: "ERROR"
 }, error2);
@@ -23848,7 +23961,7 @@ class BaseDocumentDriveServer {
 });
 });
 } catch (error) {
-logger
+this.logger.error("Error adding drive job", error);
 throw error;
 }
 }
@@ -23876,7 +23989,7 @@ class BaseDocumentDriveServer {
 if (!document || !isDocumentDrive(document)) {
 throw error ?? new Error("Invalid Document Drive document");
 }
-this.cache.setDrive(driveId, document).catch(logger
+this.cache.setDrive(driveId, document).catch(this.logger.error);
 const lastOperation = operationsApplied.filter((op) => op.scope === "global").slice().pop();
 if (lastOperation) {
 const newOp = operationsApplied.find((appliedOp) => !operations.find((o) => o.id === appliedOp.id && o.index === appliedOp.index && o.skip === appliedOp.skip && o.hash === appliedOp.hash));
@@ -23903,7 +24016,7 @@ class BaseDocumentDriveServer {
 });
 }
 }).catch((error2) => {
-logger
+this.logger.error("Non handled error updating sync revision", error2);
 this.synchronizationManager.updateSyncStatus(driveId, {
 [operationSource]: "ERROR"
 }, error2);
@@ -24005,9 +24118,26 @@ class BaseDocumentDriveServer {
 scope: strand.scope,
 branch: strand.branch
 }));
-
-
-
+let result;
+if (strand.documentId) {
+try {
+result = await this.queueOperations(strand.driveId, strand.documentId, operations, {
+source
+});
+} catch (error) {
+this.logger.error("Error queueing operations", error);
+throw error;
+}
+} else {
+try {
+result = await this.queueDriveOperations(strand.driveId, operations, {
+source
+});
+} catch (error) {
+this.logger.error("Error queueing operations", error);
+throw error;
+}
+}
 if (result.status === "ERROR") {
 const syncUnits = strand.documentId !== "" ? (await this.getSynchronizationUnitsIds(strand.driveId, [strand.documentId], [strand.scope], [strand.branch])).map((s) => s.syncId) : [strand.driveId];
 const operationSource = this.getOperationSource(source);
@@ -24381,11 +24511,10 @@ const _ListenerManager = class _ListenerManager {
 throw new Error("Maximum retries exhausted.");
 }
 const listenerUpdates = [];
-for (const [driveId,
-for (const [listenerId, listenerState] of
+for (const [driveId, listenerStateById] of this.listenerStateByDriveId) {
+for (const [listenerId, listenerState] of listenerStateById) {
 const transmitter = listenerState.listener.transmitter;
 if (!(transmitter == null ? void 0 : transmitter.transmit)) {
-this.logger.verbose(`Transmitter not set on listener: ${listenerId}`);
 continue;
 }
 const syncUnits = await this.getListenerSyncUnits(driveId, listenerId);
@@ -24539,57 +24668,78 @@ const _ListenerManager = class _ListenerManager {
 }
 }
 async getStrands(driveId, listenerId, options) {
-
+this.logger.verbose(`[SYNC DEBUG] ListenerManager.getStrands called for drive: ${driveId}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
+let listenerState;
+try {
+listenerState = this.getListenerState(driveId, listenerId);
+this.logger.verbose(`[SYNC DEBUG] Found listener state for drive: ${driveId}, listener: ${listenerId}, status: ${listenerState.listenerStatus}`);
+} catch (error) {
+this.logger.error(`[SYNC DEBUG] Failed to find listener state for drive: ${driveId}, listener: ${listenerId}. Error: ${error}`);
+throw error;
+}
 const strands = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-{
-
-
-
+try {
+const syncUnits = await this.getListenerSyncUnits(driveId, listenerId);
+this.logger.verbose(`[SYNC DEBUG] Retrieved ${syncUnits.length} sync units for drive: ${driveId}, listener: ${listenerId}`);
+const limit = options == null ? void 0 : options.limit;
+let operationsCount = 0;
+const tasks = syncUnits.map((syncUnit) => async () => {
+if (limit && operationsCount >= limit) {
+return;
+}
+if (syncUnit.revision < 0) {
+this.logger.verbose(`[SYNC DEBUG] Skipping sync unit with negative revision: ${syncUnit.syncId}, revision: ${syncUnit.revision}`);
+return;
+}
+const entry = listenerState.syncUnits.get(syncUnit.syncId);
+if (entry && entry.listenerRev >= syncUnit.revision) {
+this.logger.verbose(`[SYNC DEBUG] Skipping sync unit - listener already up to date: ${syncUnit.syncId}, listenerRev: ${entry.listenerRev}, revision: ${syncUnit.revision}`);
+return;
+}
+const { documentId, scope, branch } = syncUnit;
+try {
+this.logger.verbose(`[SYNC DEBUG] Getting operations for syncUnit: ${syncUnit.syncId}, documentId: ${documentId}, scope: ${scope}, branch: ${branch}`);
+const operations = await this.syncManager.getOperationData(
+// DEAL WITH INVALID SYNC ID ERROR
+driveId,
+syncUnit.syncId,
+{
+since: options == null ? void 0 : options.since,
+fromRevision: (options == null ? void 0 : options.fromRevision) ?? (entry == null ? void 0 : entry.listenerRev),
+limit: limit ? limit - operationsCount : void 0
+}
+);
+this.logger.verbose(`[SYNC DEBUG] Retrieved ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
+if (!operations.length) {
+return;
 }
-
-
+operationsCount += operations.length;
+strands.push({
+driveId,
+documentId,
+scope,
+branch,
+operations
+});
+this.logger.verbose(`[SYNC DEBUG] Added strand with ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
+} catch (error) {
+this.logger.error(`Error getting operations for syncUnit: ${syncUnit.syncId}, error: ${error}`);
 return;
 }
-
-
-
-
-
-
-
-});
-
-this.logger.error(error);
-return;
-}
-});
-if (this.options.sequentialUpdates) {
-for (const task of tasks) {
-await task();
+});
+if (this.options.sequentialUpdates) {
+this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units sequentially`);
+for (const task of tasks) {
+await task();
+}
+} else {
+this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units in parallel`);
+await Promise.all(tasks.map((task) => task()));
 }
-}
-
+} catch (error) {
+this.logger.error(`Error in getStrands: ${error}`);
 }
+this.logger.verbose(`ListenerManager.getStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
 return strands;
 }
 getListenerState(driveId, listenerId) {
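The rebuilt `ListenerManager.getStrands` above turns each sync unit into a deferred task, tracks a shared operations budget, and then runs the tasks either sequentially or via `Promise.all` depending on `options.sequentialUpdates`. A reduced sketch of that pattern (illustrative types; as in the original, the budget check is only strict on the sequential path):

```ts
type SyncUnit = { id: string };
type Strand = { id: string; operations: number[] };

async function collectStrands(
  syncUnits: SyncUnit[],
  fetchOps: (unit: SyncUnit, remaining: number) => Promise<number[]>,
  opts: { limit?: number; sequentialUpdates?: boolean } = {},
): Promise<Strand[]> {
  const strands: Strand[] = [];
  let operationsCount = 0;

  // Build one deferred task per sync unit; nothing runs until the tasks are invoked.
  const tasks = syncUnits.map((unit) => async () => {
    if (opts.limit && operationsCount >= opts.limit) return; // budget exhausted
    const operations = await fetchOps(unit, opts.limit ? opts.limit - operationsCount : Infinity);
    if (!operations.length) return;
    operationsCount += operations.length;
    strands.push({ id: unit.id, operations });
  });

  if (opts.sequentialUpdates) {
    for (const task of tasks) await task(); // strict ordering, accurate budget
  } else {
    await Promise.all(tasks.map((task) => task())); // faster, budget is best-effort
  }
  return strands;
}
```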
@@ -24653,26 +24803,33 @@ class SynchronizationManager {
 }
 async getSynchronizationUnits(driveId, documentId, scope, branch, documentType2) {
 const synchronizationUnitsQuery = await this.getSynchronizationUnitsIds(driveId, documentId, scope, branch, documentType2);
+this.logger.verbose(`getSynchronizationUnits query: ${JSON.stringify(synchronizationUnitsQuery)}`);
 return this.getSynchronizationUnitsRevision(driveId, synchronizationUnitsQuery);
 }
 async getSynchronizationUnitsRevision(driveId, syncUnitsQuery) {
 const drive = await this.getDrive(driveId);
 const revisions = await this.storage.getSynchronizationUnitsRevision(syncUnitsQuery);
-
-
-
-
-
-
-
-
+this.logger.verbose(`getSynchronizationUnitsRevision: ${JSON.stringify(revisions)}`);
+const synchronizationUnits = syncUnitsQuery.map((s) => ({
+...s,
+lastUpdated: drive.created,
+revision: -1
+}));
+for (const revision of revisions) {
+const syncUnit = synchronizationUnits.find((s) => revision.documentId === s.documentId && revision.scope === s.scope && revision.branch === s.branch);
+if (syncUnit) {
+syncUnit.revision = revision.revision;
+syncUnit.lastUpdated = revision.lastUpdated;
+}
+}
+return synchronizationUnits;
 }
 async getSynchronizationUnitsIds(driveId, documentId, scope, branch, documentType2) {
 const drive = await this.getDrive(driveId);
 const nodes = drive.state.global.nodes.filter((node) => isFileNode(node) && (!(documentId == null ? void 0 : documentId.length) || documentId.includes(node.id) || documentId.includes("*")) && (!(documentType2 == null ? void 0 : documentType2.length) || documentType2.includes(node.documentType) || documentType2.includes("*")));
 if ((!documentId || documentId.includes("*") || documentId.includes("")) && (!(documentType2 == null ? void 0 : documentType2.length) || documentType2.includes("powerhouse/document-drive") || documentType2.includes("*"))) {
 nodes.unshift({
-id:
+id: driveId,
 documentType: "powerhouse/document-drive",
 synchronizationUnits: [
 {
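`getSynchronizationUnitsRevision` now seeds every queried sync unit with `revision: -1` and the drive's creation date, then overlays whatever revisions the storage layer actually returned, so units without stored operations still get a well-defined default. The merge step in isolation (field names follow the hunk above; the types are assumptions):

```ts
type SyncUnitQuery = { documentId: string; scope: string; branch: string };
type StoredRevision = SyncUnitQuery & { revision: number; lastUpdated: string };
type SyncUnitRevision = SyncUnitQuery & { revision: number; lastUpdated: string };

function mergeRevisions(
  query: SyncUnitQuery[],
  stored: StoredRevision[],
  driveCreated: string,
): SyncUnitRevision[] {
  // Default every requested unit to "no operations yet".
  const units: SyncUnitRevision[] = query.map((q) => ({
    ...q,
    revision: -1,
    lastUpdated: driveCreated,
  }));
  // Overlay the revisions the storage layer actually knows about.
  for (const rev of stored) {
    const unit = units.find(
      (u) => u.documentId === rev.documentId && u.scope === rev.scope && u.branch === rev.branch,
    );
    if (unit) {
      unit.revision = rev.revision;
      unit.lastUpdated = rev.lastUpdated;
    }
  }
  return units;
}
```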
@@ -24738,14 +24895,27 @@ class SynchronizationManager {
 };
 }
 async getOperationData(driveId, syncId, filter) {
+this.logger.verbose(`[SYNC DEBUG] SynchronizationManager.getOperationData called for drive: ${driveId}, syncId: ${syncId}, filter: ${JSON.stringify(filter)}`);
 const syncUnit = syncId === "0" ? { documentId: "", scope: "global" } : await this.getSynchronizationUnitIdInfo(driveId, syncId);
 if (!syncUnit) {
+this.logger.error(`SYNC DEBUG] Invalid Sync Id ${syncId} in drive ${driveId}`);
 throw new Error(`Invalid Sync Id ${syncId} in drive ${driveId}`);
 }
+this.logger.verbose(`[SYNC DEBUG] Found sync unit: documentId: ${syncUnit.documentId}, scope: ${syncUnit.scope}`);
 const document = syncId === "0" ? await this.getDrive(driveId) : await this.getDocument(driveId, syncUnit.documentId);
+this.logger.verbose(`[SYNC DEBUG] Retrieved document ${syncUnit.documentId} with type: ${document.documentType}`);
 const operations = document.operations[syncUnit.scope] ?? [];
+this.logger.verbose(`[SYNC DEBUG] Found ${operations.length} total operations in scope ${syncUnit.scope}`);
 const filteredOperations = operations.filter((operation) => Object.keys(filter).length === 0 || (filter.since === void 0 || isBefore(filter.since, operation.timestamp)) && (filter.fromRevision === void 0 || operation.index > filter.fromRevision));
+this.logger.verbose(`[SYNC DEBUG] Filtered to ${filteredOperations.length} operations based on filter criteria` + (filter.fromRevision !== void 0 ? ` (fromRevision: ${filter.fromRevision})` : ""));
 const limitedOperations = filter.limit ? filteredOperations.slice(0, filter.limit) : filteredOperations;
+this.logger.verbose(`[SYNC DEBUG] Returning ${limitedOperations.length} operations after applying limit`);
+if (limitedOperations.length > 0) {
+const firstOp = limitedOperations[0];
+const lastOp = limitedOperations[limitedOperations.length - 1];
+this.logger.verbose(`[SYNC DEBUG] First operation: index=${firstOp.index}, type=${firstOp.type}`);
+this.logger.verbose(`[SYNC DEBUG] Last operation: index=${lastOp.index}, type=${lastOp.type}`);
+}
 return limitedOperations.map((operation) => ({
 hash: operation.hash,
 index: operation.index,
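`getOperationData` narrows a document's operation log by `since`, `fromRevision`, and `limit`, and the added logging reports each stage. The filtering itself as a hedged standalone helper (illustrative types; the bundle's `isBefore` is approximated with a plain `Date` comparison):

```ts
type Operation = { index: number; timestamp: string; type: string };
type OperationFilter = { since?: string; fromRevision?: number; limit?: number };

function selectOperations(operations: Operation[], filter: OperationFilter): Operation[] {
  const filtered = operations.filter(
    (op) =>
      // keep everything when neither criterion is set
      (filter.since === undefined || new Date(filter.since) < new Date(op.timestamp)) &&
      (filter.fromRevision === undefined || op.index > filter.fromRevision),
  );
  // apply the limit last, after both predicates
  return filter.limit ? filtered.slice(0, filter.limit) : filtered;
}

// Usage sketch: selectOperations(doc.operations.global ?? [], { fromRevision: 41, limit: 100 })
```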
@@ -25251,6 +25421,9 @@ function Modified(props) {
 function Moved(props) {
 return jsxs("svg", { ...props, width: "16", height: "16", viewBox: "0 0 16 16", fill: "none", xmlns: "http://www.w3.org/2000/svg", children: [jsx("path", { d: "M7.07143 1.96429C7.07143 2.2207 6.86356 2.42857 6.60714 2.42857H5.21429C3.67578 2.42857 2.42857 3.67578 2.42857 5.21429V10.7857C2.42857 12.3242 3.67578 13.5714 5.21429 13.5714H10.7857C12.3242 13.5714 13.5714 12.3242 13.5714 10.7857V9.39286C13.5714 9.13644 13.7793 8.92857 14.0357 8.92857C14.2921 8.92857 14.5 9.13644 14.5 9.39286V10.7857C14.5 12.8371 12.8371 14.5 10.7857 14.5H5.21429C3.16294 14.5 1.5 12.8371 1.5 10.7857V5.21429C1.5 3.16294 3.16294 1.5 5.21429 1.5H6.60714C6.86356 1.5 7.07143 1.70787 7.07143 1.96429Z", fill: "currentcolor" }), jsx("path", { d: "M14 6.5V2.5C14 2.22386 13.7761 2 13.5 2H9.5", stroke: "currentcolor", strokeLinecap: "round" }), jsx("path", { d: "M7 10L7.28346 9.29136C8.39378 6.51556 10.4269 4.20728 13.0403 2.75539L13.5 2.5", stroke: "currentcolor", strokeLinecap: "round" })] });
 }
+function Npm(props) {
+return jsx("svg", { ...props, xmlns: "http://www.w3.org/2000/svg", viewBox: "0 0 128 128", children: jsx("path", { fill: "#cb3837", d: "M2 38.5h124v43.71H64v7.29H36.44v-7.29H2zm6.89 36.43h13.78V53.07h6.89v21.86h6.89V45.79H8.89zm34.44-29.14v36.42h13.78v-7.28h13.78V45.79zm13.78 7.29H64v14.56h-6.89zm20.67-7.29v29.14h13.78V53.07h6.89v21.86h6.89V53.07h6.89v21.86h6.89V45.79z" }) });
+}
 function PackageManager(props) {
 return jsx("svg", { ...props, width: "12", height: "12", viewBox: "0 0 12 12", fill: "currentcolor", children: jsx("path", { d: "M1.22323 -0.00109863C0.549226 -0.00109863 -0.00610352 0.533576 -0.00610352 1.20691V2.79089C-0.00610352 3.46423 0.549226 3.9989 1.22323 3.9989H4.0979C4.7719 3.9989 5.32723 3.46423 5.32723 2.79089V1.20691C5.32723 0.533576 4.7719 -0.00109863 4.0979 -0.00109863H1.22323ZM7.9939 -0.00109863C7.25723 -0.00109863 6.66056 0.595568 6.66056 1.33223V5.33223C6.66056 6.0689 7.25723 6.66557 7.9939 6.66557H10.6606C11.3972 6.66557 11.9939 6.0689 11.9939 5.33223V1.33223C11.9939 0.595568 11.3972 -0.00109863 10.6606 -0.00109863H7.9939ZM1.32723 5.33223C0.590563 5.33223 -0.00610352 5.9289 -0.00610352 6.66557V10.6656C-0.00610352 11.4022 0.590563 11.9989 1.32723 11.9989H3.9939C4.73056 11.9989 5.32723 11.4022 5.32723 10.6656V6.66557C5.32723 5.9289 4.73056 5.33223 3.9939 5.33223H1.32723ZM7.88989 7.9989C7.21589 7.9989 6.66056 8.53358 6.66056 9.20691V10.7909C6.66056 11.4642 7.21589 11.9989 7.88989 11.9989H10.7646C11.4386 11.9989 11.9939 11.4642 11.9939 10.7909V9.20691C11.9939 8.53358 11.4386 7.9989 10.7646 7.9989H7.88989Z" }) });
 }
@@ -25439,6 +25612,7 @@ const iconComponents = {
 M,
 Modified,
 Moved,
+Npm,
 PackageManager,
 Pencil,
 PeopleFill,
@@ -25616,7 +25790,7 @@ const nodeOptionsMap = {
 };
 const name = "@powerhousedao/connect";
 const productName = "Powerhouse-Connect";
-const version$1 = "1.0.
+const version$1 = "1.0.10-dev.0";
 const description = "Powerhouse Connect";
 const main = "./dist/index.html";
 const type = "module";
@@ -31256,7 +31430,7 @@ if (window.__VITE_ENVS.MODE === "development") {
 } else {
 serviceWorkerManager.registerServiceWorker(false);
 }
-const App = lazy(() => __vitePreload(() => import("./app-
+const App = lazy(() => __vitePreload(() => import("./app-CsiwsM42.js").then((n) => n.aN), true ? __vite__mapDeps([0,1,2]) : void 0));
 const AppLoader = /* @__PURE__ */ jsx(Suspense, { children: /* @__PURE__ */ jsx(App, {}) });
 const appLoader = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
 __proto__: null,