@powerhousedao/connect 1.0.4-dev.0 → 1.0.4-dev.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
Files changed (25)
  1. package/dist/assets/{app-CYjeY4R-.css → app-CcH3qGge.css} +100 -10
  2. package/dist/assets/{app-DRGUnpgP.js → app-CrkKw3iM.js} +12 -10
  3. package/dist/assets/{app-loader-DC8DXqh9.css → app-loader-DGByWxSG.css} +36 -4
  4. package/dist/assets/{app-loader-q6UCPwaf.js → app-loader-UcJ4Us8H.js} +238 -119
  5. package/dist/assets/{ccip-BMVv2Emt.js → ccip-By_pxEiI.js} +3 -3
  6. package/dist/assets/{content-CVGL3ZO5.js → content-Cztr255I.js} +3 -3
  7. package/dist/assets/{index-CR5B5rEB.js → index-CIzDiBJQ.js} +4 -4
  8. package/dist/assets/{index-BsB1NVuP.js → index-DL-uHasR.js} +3 -3
  9. package/dist/assets/{index-Bn8-eDIz.js → index-vxDIqVse.js} +3 -3
  10. package/dist/assets/{main.DL5ahUYG.js → main.b7W0Jdwz.js} +1 -1
  11. package/dist/index.html +1 -1
  12. package/dist/modules/@powerhousedao/reactor-browser/{chunk-6MBHOHAA.js → chunk-4GNNWOQN.js} +1 -1
  13. package/dist/modules/@powerhousedao/reactor-browser/{chunk-UDKYG6I4.js → chunk-AEE452AO.js} +1 -1
  14. package/dist/modules/@powerhousedao/reactor-browser/{chunk-IC6B3767.js → chunk-N7NRLUYA.js} +1 -1
  15. package/dist/modules/@powerhousedao/reactor-browser/{chunk-OYYVE7RP.js → chunk-Q5MVFG2N.js} +1 -1
  16. package/dist/modules/@powerhousedao/reactor-browser/{chunk-N5UNGAA6.js → chunk-ZZIQ3KGW.js} +229 -110
  17. package/dist/modules/@powerhousedao/reactor-browser/context/index.js +2 -2
  18. package/dist/modules/@powerhousedao/reactor-browser/context/read-mode.js +2 -2
  19. package/dist/modules/@powerhousedao/reactor-browser/hooks/index.js +3 -3
  20. package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActions.js +2 -2
  21. package/dist/modules/@powerhousedao/reactor-browser/hooks/useDriveActionsWithUiNodes.js +3 -3
  22. package/dist/modules/@powerhousedao/reactor-browser/index.js +5 -5
  23. package/dist/modules/@powerhousedao/reactor-browser/reactor.js +2 -2
  24. package/dist/vite-envs.sh +1 -1
  25. package/package.json +6 -6
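
Beyond the content-hash renames of the bundled assets, the substantive change in this release sits in the bundled document-drive sync code: verbose [SYNC DEBUG] logging is added throughout the pull pipeline, and PullResponderTransmitter.executePull now recovers from a "Listener not found" response by re-registering the pull responder and retrying, while routing errors from the onStrandUpdate/onRevisions/onAcknowledge callbacks through onError instead of letting them escape the pull loop. The sketch below is a simplified, hand-written reconstruction of that new flow from the minified output (starting at the hunk marked @@ -22657,75 +22674,124 @@ further down); the method names pullStrands, registerPullResponder, acknowledgeStrands and the trigger.driveId / trigger.filter fields appear in the bundle, while the exact error-shape checks, variable names, and comments here are illustrative assumptions rather than the shipped code.

// Simplified reconstruction of the new executePull flow; assumed shapes are noted inline.
async function executePullSketch(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
  const { url } = trigger.data;
  let strands;

  // 1. Pull strands; on a "Listener not found" GraphQL error, re-register the pull
  //    responder with the trigger's filter and retry once with the new listenerId.
  try {
    strands = await PullResponderTransmitter.pullStrands(driveId, url, trigger.data.listenerId);
  } catch (e) {
    // The bundle inspects e.response.errors from the GraphQL client; optional chaining here is defensive.
    const listenerMissing = e.response?.errors?.some((err) => err.message === "Listener not found");
    if (!listenerMissing) {
      onError(e);
      return;
    }
    const listenerId = await PullResponderTransmitter.registerPullResponder(trigger.driveId, url, trigger.filter);
    trigger.data.listenerId = listenerId;
    try {
      strands = await PullResponderTransmitter.pullStrands(driveId, url, listenerId);
    } catch (retryError) {
      onError(retryError);
      return;
    }
  }

  // Nothing new on the server: still report an empty revision list, then stop.
  if (!strands.length) {
    try { onRevisions?.([]); } catch (e) { onError(e); }
    return;
  }

  // 2. Apply each strand and collect listener revisions; a per-strand failure is
  //    reported via onError but does not abort the rest of the batch.
  const listenerRevisions = [];
  for (const strand of strands) {
    let error;
    try {
      const result = await onStrandUpdate(strand, { type: "trigger", trigger });
      if (result.error) throw result.error;
    } catch (e) {
      error = e;
      onError(e);
    }
    listenerRevisions.push({
      branch: strand.branch,
      documentId: strand.documentId || "",
      driveId: strand.driveId,
      revision: strand.operations.at(-1)?.index ?? -1,
      scope: strand.scope,
      // The shipped code also maps OperationError instances to their own status value.
      status: error ? "ERROR" : "SUCCESS",
      error,
    });
  }

  // 3. Report revisions, acknowledge, and notify; callback errors are funnelled to onError.
  try { onRevisions?.(listenerRevisions); } catch (e) { onError(e); }
  let success = false;
  try {
    await PullResponderTransmitter.acknowledgeStrands(
      url,
      trigger.data.listenerId,
      listenerRevisions.map(({ error, ...rest }) => rest)
    );
    success = true;
  } catch (e) {
    onError(e);
  }
  try { onAcknowledge?.(success); } catch (e) { onError(e); }
}

As a consequence of this structure, onError may now be invoked several times within a single pull cycle, which the new doc comment added above executePull in the diff calls out explicitly.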
@@ -1,4 +1,4 @@
1
- const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/app-DRGUnpgP.js","assets/main.DL5ahUYG.js","assets/app-CYjeY4R-.css"])))=>i.map(i=>d[i]);
1
+ const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/app-CrkKw3iM.js","assets/main.b7W0Jdwz.js","assets/app-CcH3qGge.css"])))=>i.map(i=>d[i]);
2
2
  var __defProp = Object.defineProperty;
3
3
  var __typeError = (msg) => {
4
4
  throw TypeError(msg);
@@ -11,7 +11,7 @@ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot
11
11
  var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
12
12
  var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
13
13
  var _tags, _levelString, _errorHandler, _ConsoleLogger_instances, levelValue_get, _getDocumentModelModule, _drives, _ReadModeService_instances, parseGraphQLErrors_fn, fetchDrive_fn, _a, _ServiceWorkerManager_instances, handleServiceWorkerMessage_fn, handleServiceWorker_fn;
14
- import { _ as __vitePreload } from "./main.DL5ahUYG.js";
14
+ import { _ as __vitePreload } from "./main.b7W0Jdwz.js";
15
15
  import { jsx, jsxs } from "react/jsx-runtime";
16
16
  import { useState, useEffect, Suspense, lazy } from "react";
17
17
  function _mergeNamespaces(n, m) {
@@ -22530,8 +22530,18 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
22530
22530
  this.logger.verbose(`constructor(listener: ${listener.listenerId})`);
22531
22531
  }
22532
22532
  getStrands(options) {
22533
- this.logger.verbose(`getStrands(drive: ${this.listener.driveId}, listener: ${this.listener.listenerId})`);
22534
- return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options);
22533
+ this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands called for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}, options: ${JSON.stringify(options || {})}`);
22534
+ return this.manager.getStrands(this.listener.driveId, this.listener.listenerId, options).then((strands) => {
22535
+ this.logger.verbose(`[SYNC DEBUG] PullResponderTransmitter.getStrands returning ${strands.length} strands for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
22536
+ if (strands.length === 0) {
22537
+ this.logger.verbose(`[SYNC DEBUG] No strands returned for drive: ${this.listener.driveId}, listener: ${this.listener.listenerId}`);
22538
+ } else {
22539
+ for (const strand of strands) {
22540
+ this.logger.verbose(`[SYNC DEBUG] Strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, operations: ${strand.operations.length}`);
22541
+ }
22542
+ }
22543
+ return strands;
22544
+ });
22535
22545
  }
22536
22546
  disconnect() {
22537
22547
  return Promise.resolve();
@@ -22572,7 +22582,7 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
22572
22582
  }
22573
22583
  static async pullStrands(driveId, url, listenerId, options) {
22574
22584
  var _a2;
22575
- this.staticLogger.verbose(`pullStrands(url: ${url}, listener: ${listenerId})`);
22585
+ this.staticLogger.verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands called for drive: ${driveId}, url: ${url}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
22576
22586
  const result = await requestGraphql(url, gql`
22577
22587
  query strands($listenerId: ID!) {
22578
22588
  system {
@@ -22612,18 +22622,25 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
22612
22622
  `, { listenerId });
22613
22623
  const error = (_a2 = result.errors) == null ? void 0 : _a2.at(0);
22614
22624
  if (error) {
22625
+ this.staticLogger.verbose(`[SYNC DEBUG] Error pulling strands for drive: ${driveId}, listener: ${listenerId}, error: ${JSON.stringify(error)}`);
22615
22626
  throw error;
22616
22627
  }
22617
22628
  if (!result.system) {
22629
+ this.staticLogger.verbose(`[SYNC DEBUG] No system data returned when pulling strands for drive: ${driveId}, listener: ${listenerId}`);
22618
22630
  return [];
22619
22631
  }
22620
- return result.system.sync.strands.map((s) => ({
22632
+ const strands = result.system.sync.strands.map((s) => ({
22621
22633
  ...s,
22622
22634
  operations: s.operations.map((o) => ({
22623
22635
  ...o,
22624
22636
  input: JSON.parse(o.input)
22625
22637
  }))
22626
22638
  }));
22639
+ this.staticLogger.verbose(`[SYNC DEBUG] PullResponderTransmitter.pullStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
22640
+ if (strands.length > 0) {
22641
+ this.staticLogger.verbose(`[SYNC DEBUG] Strands being returned: ${strands.map((s) => `${s.documentId}:${s.scope}`).join(", ")}`);
22642
+ }
22643
+ return strands;
22627
22644
  }
22628
22645
  static async acknowledgeStrands(url, listenerId, revisions) {
22629
22646
  this.staticLogger.verbose(`acknowledgeStrands(url: ${url}, listener: ${listenerId})`, revisions);
@@ -22657,75 +22674,124 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
22657
22674
  throw new Error("Error acknowledging strands");
22658
22675
  }
22659
22676
  }
22677
+ /**
22678
+ * This function will only throw if `onError` throws an error (or there is
22679
+ * an unintentionally unhandled error in the pull loop).
22680
+ *
22681
+ * All other errors are caught, logged, and passed to `onError`.
22682
+ *
22683
+ * Because of this, `onError` _may be called multiple times_.
22684
+ */
22660
22685
  static async executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
22661
22686
  var _a2;
22662
22687
  this.staticLogger.verbose(`executePull(driveId: ${driveId}), trigger:`, trigger);
22688
+ this.staticLogger.info(`[SYNC DEBUG] PullResponderTransmitter.executePull starting for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22689
+ const { url } = trigger.data;
22690
+ let strands;
22691
+ let error;
22663
22692
  try {
22664
- const { url, listenerId } = trigger.data;
22665
- const strands = await _PullResponderTransmitter.pullStrands(driveId, url, listenerId);
22666
- this.staticLogger.verbose("Pulled strands...");
22667
- if (!strands.length) {
22668
- onRevisions == null ? void 0 : onRevisions([]);
22669
- this.staticLogger.verbose("No new strands, skipping...");
22670
- return;
22671
- }
22672
- const listenerRevisions = [];
22673
- for (const strand of strands) {
22674
- const operations = strand.operations.map((op) => ({
22675
- ...op,
22676
- scope: strand.scope,
22677
- branch: strand.branch
22678
- }));
22679
- this.staticLogger.verbose("Processing strand...");
22680
- let error = void 0;
22681
- try {
22682
- const result = await onStrandUpdate(strand, {
22683
- type: "trigger",
22684
- trigger
22685
- });
22686
- if (result.error) {
22687
- throw result.error;
22693
+ strands = await _PullResponderTransmitter.pullStrands(driveId, url, trigger.data.listenerId);
22694
+ } catch (e) {
22695
+ error = e;
22696
+ const errors = error.response.errors;
22697
+ for (const error2 of errors) {
22698
+ if (error2.message === "Listener not found") {
22699
+ this.staticLogger.verbose(`[SYNC DEBUG] Auto-registering pull responder for drive: ${driveId}`);
22700
+ const listenerId = await _PullResponderTransmitter.registerPullResponder(trigger.driveId, url, trigger.filter);
22701
+ trigger.data.listenerId = listenerId;
22702
+ try {
22703
+ strands = await _PullResponderTransmitter.pullStrands(driveId, url, listenerId);
22704
+ this.staticLogger.verbose(`Successfully auto-registerd and pulled strands for drive: ${driveId}, listenerId: ${listenerId}`);
22705
+ } catch (error3) {
22706
+ this.staticLogger.error(`Could not resolve 'Listener not found' error by registering a new pull responder for drive: ${driveId}, listenerId: ${listenerId}: ${error3}`);
22707
+ onError(error3);
22708
+ return;
22688
22709
  }
22689
- } catch (e) {
22690
- error = e;
22691
- onError(error);
22710
+ break;
22692
22711
  }
22693
- listenerRevisions.push({
22694
- branch: strand.branch,
22695
- documentId: strand.documentId || "",
22696
- driveId: strand.driveId,
22697
- revision: ((_a2 = operations.pop()) == null ? void 0 : _a2.index) ?? -1,
22698
- scope: strand.scope,
22699
- status: error ? error instanceof OperationError ? error.status : "ERROR" : "SUCCESS",
22700
- error
22701
- });
22702
22712
  }
22703
- this.staticLogger.verbose("Processed strands...");
22704
- onRevisions == null ? void 0 : onRevisions(listenerRevisions);
22705
- this.staticLogger.verbose("Acknowledging strands...");
22706
- let success = false;
22713
+ }
22714
+ if (!strands) {
22715
+ this.staticLogger.error(`Error pulling strands for drive, and could not auto-register: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
22716
+ onError(error);
22717
+ return;
22718
+ }
22719
+ if (!strands.length) {
22720
+ this.staticLogger.verbose(`[SYNC DEBUG] No strands returned in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22707
22721
  try {
22708
- await _PullResponderTransmitter.acknowledgeStrands(url, listenerId, listenerRevisions.map((revision) => {
22709
- const { error, ...rest } = revision;
22710
- return rest;
22711
- }));
22712
- success = true;
22713
- } catch (error) {
22714
- this.staticLogger.error("ACK error", error);
22715
- }
22716
- if (success) {
22717
- this.staticLogger.verbose("Acknowledged strands successfully.");
22718
- } else {
22719
- this.staticLogger.error("Failed to acknowledge strands");
22722
+ onRevisions == null ? void 0 : onRevisions([]);
22723
+ } catch (error2) {
22724
+ this.staticLogger.error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
22725
+ onError(error2);
22720
22726
  }
22727
+ return;
22728
+ }
22729
+ this.staticLogger.verbose(`[SYNC DEBUG] Processing ${strands.length} strands in pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22730
+ const listenerRevisions = [];
22731
+ for (const strand of strands) {
22732
+ const operations = strand.operations.map((op) => ({
22733
+ ...op,
22734
+ scope: strand.scope,
22735
+ branch: strand.branch
22736
+ }));
22737
+ this.staticLogger.verbose(`[SYNC DEBUG] Processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations`);
22738
+ let error2 = void 0;
22739
+ try {
22740
+ const result = await onStrandUpdate(strand, {
22741
+ type: "trigger",
22742
+ trigger
22743
+ });
22744
+ if (result.error) {
22745
+ throw result.error;
22746
+ }
22747
+ } catch (e) {
22748
+ this.staticLogger.error(`Error processing strand for drive: ${strand.driveId}, document: ${strand.documentId}, scope: ${strand.scope}, with ${operations.length} operations: ${e}`);
22749
+ error2 = e;
22750
+ onError(error2);
22751
+ }
22752
+ listenerRevisions.push({
22753
+ branch: strand.branch,
22754
+ documentId: strand.documentId || "",
22755
+ driveId: strand.driveId,
22756
+ revision: ((_a2 = operations.pop()) == null ? void 0 : _a2.index) ?? -1,
22757
+ scope: strand.scope,
22758
+ status: error2 ? error2 instanceof OperationError ? error2.status : "ERROR" : "SUCCESS",
22759
+ error: error2
22760
+ });
22761
+ }
22762
+ this.staticLogger.verbose("Processed strands...");
22763
+ try {
22764
+ onRevisions == null ? void 0 : onRevisions(listenerRevisions);
22765
+ } catch (error2) {
22766
+ this.staticLogger.error(`Error calling onRevisions for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
22767
+ onError(error2);
22768
+ }
22769
+ this.staticLogger.verbose(`[SYNC DEBUG] Acknowledging ${listenerRevisions.length} strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22770
+ let success = false;
22771
+ try {
22772
+ await _PullResponderTransmitter.acknowledgeStrands(url, trigger.data.listenerId, listenerRevisions.map((revision) => {
22773
+ const { error: error2, ...rest } = revision;
22774
+ return rest;
22775
+ }));
22776
+ success = true;
22777
+ } catch (error2) {
22778
+ this.staticLogger.error(`Error acknowledging strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
22779
+ onError(error2);
22780
+ }
22781
+ if (success) {
22782
+ this.staticLogger.verbose(`[SYNC DEBUG] Successfully acknowledged strands for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22783
+ } else {
22784
+ this.staticLogger.error("Failed to acknowledge strands");
22785
+ }
22786
+ try {
22721
22787
  onAcknowledge == null ? void 0 : onAcknowledge(success);
22722
- } catch (error) {
22723
- this.staticLogger.error("Pull error", error);
22724
- onError(error);
22788
+ } catch (error2) {
22789
+ this.staticLogger.error(`Error calling onAcknowledge for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error2}`);
22790
+ onError(error2);
22725
22791
  }
22726
22792
  }
22727
22793
  static setupPull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge) {
22728
- this.staticLogger.verbose(`setupPull(drive: ${driveId}), trigger:`, trigger);
22794
+ this.staticLogger.verbose(`[SYNC DEBUG] PullResponderTransmitter.setupPull initiated for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22729
22795
  const { interval } = trigger.data;
22730
22796
  let loopInterval = PULL_DRIVE_INTERVAL;
22731
22797
  if (interval) {
@@ -22737,20 +22803,25 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
22737
22803
  } catch {
22738
22804
  }
22739
22805
  }
22806
+ this.staticLogger.verbose(`[SYNC DEBUG] Pull interval set to ${loopInterval}ms for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22740
22807
  let isCancelled = false;
22741
22808
  let timeout;
22742
22809
  const executeLoop = async () => {
22743
22810
  while (!isCancelled) {
22744
- this.staticLogger.verbose("Execute loop...");
22811
+ this.staticLogger.verbose(`[SYNC DEBUG] Starting pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22745
22812
  await this.executePull(driveId, trigger, onStrandUpdate, onError, onRevisions, onAcknowledge);
22813
+ this.staticLogger.verbose(`[SYNC DEBUG] Completed pull cycle for drive: ${driveId}, listenerId: ${trigger.data.listenerId}, waiting ${loopInterval}ms for next cycle`);
22746
22814
  await new Promise((resolve) => {
22747
22815
  this.staticLogger.verbose(`Scheduling next pull in ${loopInterval} ms`);
22748
22816
  timeout = setTimeout(resolve, loopInterval);
22749
22817
  });
22750
22818
  }
22751
22819
  };
22752
- executeLoop().catch(this.staticLogger.error);
22820
+ executeLoop().catch((error) => {
22821
+ this.staticLogger.error(`Error in executeLoop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}: ${error}`);
22822
+ });
22753
22823
  return () => {
22824
+ this.staticLogger.verbose(`[SYNC DEBUG] Cancelling pull loop for drive: ${driveId}, listenerId: ${trigger.data.listenerId}`);
22754
22825
  isCancelled = true;
22755
22826
  if (timeout !== void 0) {
22756
22827
  clearTimeout(timeout);
@@ -22760,15 +22831,18 @@ const _PullResponderTransmitter = class _PullResponderTransmitter {
22760
22831
  static async createPullResponderTrigger(driveId, url, options) {
22761
22832
  this.staticLogger.verbose(`createPullResponderTrigger(drive: ${driveId}, url: ${url})`);
22762
22833
  const { pullFilter, pullInterval } = options;
22763
- const listenerId = await _PullResponderTransmitter.registerPullResponder(driveId, url, pullFilter ?? {
22834
+ const filter = pullFilter ?? {
22764
22835
  documentId: ["*"],
22765
22836
  documentType: ["*"],
22766
22837
  branch: ["*"],
22767
22838
  scope: ["*"]
22768
- });
22839
+ };
22840
+ const listenerId = await _PullResponderTransmitter.registerPullResponder(driveId, url, filter);
22769
22841
  const pullTrigger = {
22770
22842
  id: generateUUID(),
22771
22843
  type: "PullResponder",
22844
+ driveId,
22845
+ filter,
22772
22846
  data: {
22773
22847
  url,
22774
22848
  listenerId,
@@ -23200,30 +23274,39 @@ class BaseDocumentDriveServer {
23200
23274
  return this.triggerMap.delete(driveId);
23201
23275
  }
23202
23276
  async _initializeDrive(driveId) {
23203
- var _a2, _b, _c, _d, _e, _f;
23277
+ var _a2, _b, _c, _d, _e, _f, _g, _h;
23204
23278
  const drive = await this.getDrive(driveId);
23279
+ logger$1.verbose(`[SYNC DEBUG] Initializing drive ${driveId} with slug "${drive.state.global.slug}"`);
23205
23280
  await this.synchronizationManager.initializeDriveSyncStatus(driveId, drive);
23206
23281
  if (this.shouldSyncRemoteDrive(drive)) {
23282
+ logger$1.verbose(`[SYNC DEBUG] Starting sync for remote drive ${driveId}`);
23207
23283
  await this.startSyncRemoteDrive(driveId);
23208
23284
  }
23285
+ logger$1.verbose(`[SYNC DEBUG] Processing ${drive.state.local.listeners.length} listeners for drive ${driveId}`);
23209
23286
  for (const zodListener of drive.state.local.listeners) {
23210
23287
  if (((_a2 = zodListener.callInfo) == null ? void 0 : _a2.transmitterType) === "SwitchboardPush") {
23288
+ logger$1.verbose(`[SYNC DEBUG] Setting up SwitchboardPush listener ${zodListener.listenerId} for drive ${driveId}`);
23211
23289
  const transmitter = new SwitchboardPushTransmitter(((_b = zodListener.callInfo) == null ? void 0 : _b.data) ?? "");
23212
- this.listenerManager.setListener(driveId, {
23290
+ logger$1.verbose(`[SYNC DEBUG] Created SwitchboardPush transmitter with URL: ${((_c = zodListener.callInfo) == null ? void 0 : _c.data) || "none"}`);
23291
+ await this.listenerManager.setListener(driveId, {
23213
23292
  block: zodListener.block,
23214
23293
  driveId: drive.state.global.id,
23215
23294
  filter: {
23216
- branch: ((_c = zodListener.filter) == null ? void 0 : _c.branch) ?? [],
23217
- documentId: ((_d = zodListener.filter) == null ? void 0 : _d.documentId) ?? [],
23218
- documentType: ((_e = zodListener.filter) == null ? void 0 : _e.documentType) ?? [],
23219
- scope: ((_f = zodListener.filter) == null ? void 0 : _f.scope) ?? []
23295
+ branch: ((_d = zodListener.filter) == null ? void 0 : _d.branch) ?? [],
23296
+ documentId: ((_e = zodListener.filter) == null ? void 0 : _e.documentId) ?? [],
23297
+ documentType: ((_f = zodListener.filter) == null ? void 0 : _f.documentType) ?? [],
23298
+ scope: ((_g = zodListener.filter) == null ? void 0 : _g.scope) ?? []
23220
23299
  },
23221
23300
  listenerId: zodListener.listenerId,
23222
23301
  callInfo: zodListener.callInfo,
23223
23302
  system: zodListener.system,
23224
23303
  label: zodListener.label ?? "",
23225
23304
  transmitter
23305
+ }).then(() => {
23306
+ logger$1.verbose(`[SYNC DEBUG] Successfully set up listener ${zodListener.listenerId} for drive ${driveId}`);
23226
23307
  });
23308
+ } else {
23309
+ logger$1.error(`Skipping listener ${zodListener.listenerId} with unsupported type ${((_h = zodListener.callInfo) == null ? void 0 : _h.transmitterType) || "unknown"}`);
23227
23310
  }
23228
23311
  }
23229
23312
  }
@@ -24539,57 +24622,78 @@ const _ListenerManager = class _ListenerManager {
24539
24622
  }
24540
24623
  }
24541
24624
  async getStrands(driveId, listenerId, options) {
24542
- const listenerState = this.getListenerState(driveId, listenerId);
24625
+ this.logger.verbose(`[SYNC DEBUG] ListenerManager.getStrands called for drive: ${driveId}, listener: ${listenerId}, options: ${JSON.stringify(options || {})}`);
24626
+ let listenerState;
24627
+ try {
24628
+ listenerState = this.getListenerState(driveId, listenerId);
24629
+ this.logger.verbose(`[SYNC DEBUG] Found listener state for drive: ${driveId}, listener: ${listenerId}, status: ${listenerState.listenerStatus}`);
24630
+ } catch (error) {
24631
+ this.logger.error(`[SYNC DEBUG] Failed to find listener state for drive: ${driveId}, listener: ${listenerId}. Error: ${error}`);
24632
+ throw error;
24633
+ }
24543
24634
  const strands = [];
24544
- const syncUnits = await this.getListenerSyncUnits(driveId, listenerId);
24545
- const limit = options == null ? void 0 : options.limit;
24546
- let operationsCount = 0;
24547
- const tasks = syncUnits.map((syncUnit) => async () => {
24548
- if (limit && operationsCount >= limit) {
24549
- return;
24550
- }
24551
- if (syncUnit.revision < 0) {
24552
- return;
24553
- }
24554
- const entry = listenerState.syncUnits.get(syncUnit.syncId);
24555
- if (entry && entry.listenerRev >= syncUnit.revision) {
24556
- return;
24557
- }
24558
- const { documentId, scope, branch } = syncUnit;
24559
- try {
24560
- const operations = await this.syncManager.getOperationData(
24561
- // DEAL WITH INVALID SYNC ID ERROR
24562
- driveId,
24563
- syncUnit.syncId,
24564
- {
24565
- since: options == null ? void 0 : options.since,
24566
- fromRevision: (options == null ? void 0 : options.fromRevision) ?? (entry == null ? void 0 : entry.listenerRev),
24567
- limit: limit ? limit - operationsCount : void 0
24635
+ try {
24636
+ const syncUnits = await this.getListenerSyncUnits(driveId, listenerId);
24637
+ this.logger.verbose(`[SYNC DEBUG] Retrieved ${syncUnits.length} sync units for drive: ${driveId}, listener: ${listenerId}`);
24638
+ const limit = options == null ? void 0 : options.limit;
24639
+ let operationsCount = 0;
24640
+ const tasks = syncUnits.map((syncUnit) => async () => {
24641
+ if (limit && operationsCount >= limit) {
24642
+ return;
24643
+ }
24644
+ if (syncUnit.revision < 0) {
24645
+ this.logger.verbose(`[SYNC DEBUG] Skipping sync unit with negative revision: ${syncUnit.syncId}, revision: ${syncUnit.revision}`);
24646
+ return;
24647
+ }
24648
+ const entry = listenerState.syncUnits.get(syncUnit.syncId);
24649
+ if (entry && entry.listenerRev >= syncUnit.revision) {
24650
+ this.logger.verbose(`[SYNC DEBUG] Skipping sync unit - listener already up to date: ${syncUnit.syncId}, listenerRev: ${entry.listenerRev}, revision: ${syncUnit.revision}`);
24651
+ return;
24652
+ }
24653
+ const { documentId, scope, branch } = syncUnit;
24654
+ try {
24655
+ this.logger.verbose(`[SYNC DEBUG] Getting operations for syncUnit: ${syncUnit.syncId}, documentId: ${documentId}, scope: ${scope}, branch: ${branch}`);
24656
+ const operations = await this.syncManager.getOperationData(
24657
+ // DEAL WITH INVALID SYNC ID ERROR
24658
+ driveId,
24659
+ syncUnit.syncId,
24660
+ {
24661
+ since: options == null ? void 0 : options.since,
24662
+ fromRevision: (options == null ? void 0 : options.fromRevision) ?? (entry == null ? void 0 : entry.listenerRev),
24663
+ limit: limit ? limit - operationsCount : void 0
24664
+ }
24665
+ );
24666
+ this.logger.verbose(`[SYNC DEBUG] Retrieved ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
24667
+ if (!operations.length) {
24668
+ return;
24568
24669
  }
24569
- );
24570
- if (!operations.length) {
24670
+ operationsCount += operations.length;
24671
+ strands.push({
24672
+ driveId,
24673
+ documentId,
24674
+ scope,
24675
+ branch,
24676
+ operations
24677
+ });
24678
+ this.logger.verbose(`[SYNC DEBUG] Added strand with ${operations.length} operations for syncUnit: ${syncUnit.syncId}`);
24679
+ } catch (error) {
24680
+ this.logger.error(`Error getting operations for syncUnit: ${syncUnit.syncId}, error: ${error}`);
24571
24681
  return;
24572
24682
  }
24573
- operationsCount += operations.length;
24574
- strands.push({
24575
- driveId,
24576
- documentId,
24577
- scope,
24578
- branch,
24579
- operations
24580
- });
24581
- } catch (error) {
24582
- this.logger.error(error);
24583
- return;
24584
- }
24585
- });
24586
- if (this.options.sequentialUpdates) {
24587
- for (const task of tasks) {
24588
- await task();
24683
+ });
24684
+ if (this.options.sequentialUpdates) {
24685
+ this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units sequentially`);
24686
+ for (const task of tasks) {
24687
+ await task();
24688
+ }
24689
+ } else {
24690
+ this.logger.verbose(`[SYNC DEBUG] Processing ${tasks.length} sync units in parallel`);
24691
+ await Promise.all(tasks.map((task) => task()));
24589
24692
  }
24590
- } else {
24591
- await Promise.all(tasks.map((task) => task()));
24693
+ } catch (error) {
24694
+ this.logger.error(`Error in getStrands: ${error}`);
24592
24695
  }
24696
+ this.logger.verbose(`ListenerManager.getStrands returning ${strands.length} strands for drive: ${driveId}, listener: ${listenerId}`);
24593
24697
  return strands;
24594
24698
  }
24595
24699
  getListenerState(driveId, listenerId) {
@@ -24653,11 +24757,13 @@ class SynchronizationManager {
24653
24757
  }
24654
24758
  async getSynchronizationUnits(driveId, documentId, scope, branch, documentType2) {
24655
24759
  const synchronizationUnitsQuery = await this.getSynchronizationUnitsIds(driveId, documentId, scope, branch, documentType2);
24760
+ this.logger.verbose(`getSynchronizationUnits query: ${JSON.stringify(synchronizationUnitsQuery)}`);
24656
24761
  return this.getSynchronizationUnitsRevision(driveId, synchronizationUnitsQuery);
24657
24762
  }
24658
24763
  async getSynchronizationUnitsRevision(driveId, syncUnitsQuery) {
24659
24764
  const drive = await this.getDrive(driveId);
24660
24765
  const revisions = await this.storage.getSynchronizationUnitsRevision(syncUnitsQuery);
24766
+ this.logger.verbose(`getSynchronizationUnitsRevision: ${JSON.stringify(revisions)}`);
24661
24767
  return syncUnitsQuery.map((s) => {
24662
24768
  var _a2;
24663
24769
  return {
@@ -24738,14 +24844,27 @@ class SynchronizationManager {
24738
24844
  };
24739
24845
  }
24740
24846
  async getOperationData(driveId, syncId, filter) {
24847
+ this.logger.verbose(`[SYNC DEBUG] SynchronizationManager.getOperationData called for drive: ${driveId}, syncId: ${syncId}, filter: ${JSON.stringify(filter)}`);
24741
24848
  const syncUnit = syncId === "0" ? { documentId: "", scope: "global" } : await this.getSynchronizationUnitIdInfo(driveId, syncId);
24742
24849
  if (!syncUnit) {
24850
+ this.logger.error(`SYNC DEBUG] Invalid Sync Id ${syncId} in drive ${driveId}`);
24743
24851
  throw new Error(`Invalid Sync Id ${syncId} in drive ${driveId}`);
24744
24852
  }
24853
+ this.logger.verbose(`[SYNC DEBUG] Found sync unit: documentId: ${syncUnit.documentId}, scope: ${syncUnit.scope}`);
24745
24854
  const document = syncId === "0" ? await this.getDrive(driveId) : await this.getDocument(driveId, syncUnit.documentId);
24855
+ this.logger.verbose(`[SYNC DEBUG] Retrieved document ${syncUnit.documentId} with type: ${document.documentType}`);
24746
24856
  const operations = document.operations[syncUnit.scope] ?? [];
24857
+ this.logger.verbose(`[SYNC DEBUG] Found ${operations.length} total operations in scope ${syncUnit.scope}`);
24747
24858
  const filteredOperations = operations.filter((operation) => Object.keys(filter).length === 0 || (filter.since === void 0 || isBefore(filter.since, operation.timestamp)) && (filter.fromRevision === void 0 || operation.index > filter.fromRevision));
24859
+ this.logger.verbose(`[SYNC DEBUG] Filtered to ${filteredOperations.length} operations based on filter criteria` + (filter.fromRevision !== void 0 ? ` (fromRevision: ${filter.fromRevision})` : ""));
24748
24860
  const limitedOperations = filter.limit ? filteredOperations.slice(0, filter.limit) : filteredOperations;
24861
+ this.logger.verbose(`[SYNC DEBUG] Returning ${limitedOperations.length} operations after applying limit`);
24862
+ if (limitedOperations.length > 0) {
24863
+ const firstOp = limitedOperations[0];
24864
+ const lastOp = limitedOperations[limitedOperations.length - 1];
24865
+ this.logger.verbose(`[SYNC DEBUG] First operation: index=${firstOp.index}, type=${firstOp.type}`);
24866
+ this.logger.verbose(`[SYNC DEBUG] Last operation: index=${lastOp.index}, type=${lastOp.type}`);
24867
+ }
24749
24868
  return limitedOperations.map((operation) => ({
24750
24869
  hash: operation.hash,
24751
24870
  index: operation.index,
@@ -25620,7 +25739,7 @@ const nodeOptionsMap = {
25620
25739
  };
25621
25740
  const name = "@powerhousedao/connect";
25622
25741
  const productName = "Powerhouse-Connect";
25623
- const version$1 = "1.0.4-dev.0";
25742
+ const version$1 = "1.0.4-dev.1";
25624
25743
  const description = "Powerhouse Connect";
25625
25744
  const main = "./dist/index.html";
25626
25745
  const type = "module";
@@ -31260,7 +31379,7 @@ if (window.__VITE_ENVS.MODE === "development") {
31260
31379
  } else {
31261
31380
  serviceWorkerManager.registerServiceWorker(false);
31262
31381
  }
31263
- const App = lazy(() => __vitePreload(() => import("./app-DRGUnpgP.js").then((n) => n.aN), true ? __vite__mapDeps([0,1,2]) : void 0));
31382
+ const App = lazy(() => __vitePreload(() => import("./app-CrkKw3iM.js").then((n) => n.aN), true ? __vite__mapDeps([0,1,2]) : void 0));
31264
31383
  const AppLoader = /* @__PURE__ */ jsx(Suspense, { children: /* @__PURE__ */ jsx(App, {}) });
31265
31384
  const appLoader = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
31266
31385
  __proto__: null,
@@ -1,7 +1,7 @@
1
- import { aD as BaseError, aE as getUrl, aF as stringify, aG as decodeErrorResult, aH as isAddressEqual, aI as call, aJ as concat, aK as encodeAbiParameters, aL as HttpRequestError, aM as isHex } from "./app-DRGUnpgP.js";
2
- import "./main.DL5ahUYG.js";
1
+ import { aD as BaseError, aE as getUrl, aF as stringify, aG as decodeErrorResult, aH as isAddressEqual, aI as call, aJ as concat, aK as encodeAbiParameters, aL as HttpRequestError, aM as isHex } from "./app-CrkKw3iM.js";
2
+ import "./main.b7W0Jdwz.js";
3
3
  import "react/jsx-runtime";
4
- import "./app-loader-q6UCPwaf.js";
4
+ import "./app-loader-UcJ4Us8H.js";
5
5
  import "react";
6
6
  import "@powerhousedao/reactor-browser";
7
7
  import "react-dom";
@@ -1,8 +1,8 @@
1
1
  import { jsx, jsxs, Fragment as Fragment$1 } from "react/jsx-runtime";
2
- import { t as twMerge, B as Button, m as mergeClassNameProps, E as ERROR, M as MISSING, C as CONFLICT, S as SUCCESS, h as SYNCING, I as INITIAL_SYNC, i as ConnectDropdownMenu, u as useOnClickOutside, j as useEventListener, k as useCopyToClipboard, l as Select, n as ENSAvatar, P as Provider, o as Root3, T as Trigger, p as Portal, q as Content2, v as validateInitialState, w as validateStateSchemaName, x as validateModules, y as useUnwrappedReactor, z as useConnectDid, A as useConnectCrypto, D as useTranslation, F as useModal, G as useAtomValue, H as themeAtom, J as useUser, K as useUserPermissions, L as useUiNodes, N as exportFile, O as useGetDocumentModelModule, Q as addActionContext, U as signOperation, V as useDocumentDriveServer, W as useHotkeys, X as useGetEditor, Y as isSameDocument, Z as useNavigate, _ as ErrorBoundary, $ as DriveLayout, a0 as SearchBar, a1 as useAsyncReactor, a2 as useFilteredDocumentModels, a3 as useDriveEditor, a4 as useDocumentDriveById } from "./app-DRGUnpgP.js";
2
+ import { t as twMerge, B as Button, m as mergeClassNameProps, E as ERROR, M as MISSING, C as CONFLICT, S as SUCCESS, h as SYNCING, I as INITIAL_SYNC, i as ConnectDropdownMenu, u as useOnClickOutside, j as useEventListener, k as useCopyToClipboard, l as Select, n as ENSAvatar, P as Provider, o as Root3, T as Trigger, p as Portal, q as Content2, v as validateInitialState, w as validateStateSchemaName, x as validateModules, y as useUnwrappedReactor, z as useConnectDid, A as useConnectCrypto, D as useTranslation, F as useModal, G as useAtomValue, H as themeAtom, J as useUser, K as useUserPermissions, L as useUiNodes, N as exportFile, O as useGetDocumentModelModule, Q as addActionContext, U as signOperation, V as useDocumentDriveServer, W as useHotkeys, X as useGetEditor, Y as isSameDocument, Z as useNavigate, _ as ErrorBoundary, $ as DriveLayout, a0 as SearchBar, a1 as useAsyncReactor, a2 as useFilteredDocumentModels, a3 as useDriveEditor, a4 as useDocumentDriveById } from "./app-CrkKw3iM.js";
3
3
  import * as React from "react";
4
4
  import React__default, { useState, useCallback, useMemo, useEffect, Fragment, useRef, useLayoutEffect, memo as memo$1, createElement, useSyncExternalStore, Suspense } from "react";
5
- import { _ as Icon, aX as getDimensions, aY as READ, aZ as nodeOptionsMap, a_ as defaultFileOptions, a$ as DELETE, b0 as RENAME, b1 as WRITE, b2 as DUPLICATE, b3 as defaultFolderOptions, b4 as garbageCollect, b5 as sortOperations, b6 as UI_NODE, aO as DRIVE, ay as FILE, b7 as undo, b8 as redo, ac as logger, b9 as useDocumentDispatch, aP as FOLDER, ak as driveDocumentModelModule } from "./app-loader-q6UCPwaf.js";
5
+ import { _ as Icon, aX as getDimensions, aY as READ, aZ as nodeOptionsMap, a_ as defaultFileOptions, a$ as DELETE, b0 as RENAME, b1 as WRITE, b2 as DUPLICATE, b3 as defaultFolderOptions, b4 as garbageCollect, b5 as sortOperations, b6 as UI_NODE, aO as DRIVE, ay as FILE, b7 as undo, b8 as redo, ac as logger, b9 as useDocumentDispatch, aP as FOLDER, ak as driveDocumentModelModule } from "./app-loader-UcJ4Us8H.js";
6
6
  import { useDocument, useDocumentEditorProps } from "@powerhousedao/reactor-browser";
7
7
  import { useUiNodesContext, FILE as FILE$1 } from "@powerhousedao/reactor-browser/hooks/useUiNodesContext";
8
8
  import { useDriveActionsWithUiNodes } from "@powerhousedao/reactor-browser/hooks/useDriveActionsWithUiNodes";
@@ -10,7 +10,7 @@ import { useDriveContext, DriveContextProvider } from "@powerhousedao/reactor-br
10
10
  import "@powerhousedao/reactor-browser/uiNodes/constants";
11
11
  import { makeDriveDocumentStateHook } from "@powerhousedao/reactor-browser/hooks/document-state";
12
12
  import { flushSync } from "react-dom";
13
- import "./main.DL5ahUYG.js";
13
+ import "./main.b7W0Jdwz.js";
14
14
  const PaginationButton = ({ active = false, ...props }) => {
15
15
  const className = twMerge("h-8 min-w-8 border border-solid border-gray-300 bg-white px-3 py-1 text-xs text-gray-900 hover:bg-gray-100", !active && "border-0");
16
16
  return jsx(Button, { color: "light", size: "small", ...mergeClassNameProps(props, className), children: props.children });
@@ -10,10 +10,10 @@ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot
10
10
  var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
11
11
  var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
12
12
  var _DB_NAME, _STORE_NAME, _KEY, _db, _BrowserKeyStorage_instances, useStore_fn, _subtleCrypto, _keyPair, _keyPairStorage, _did, _ConnectCrypto_instances, initCrypto_fn, initialize_fn, parseDid_fn, generateECDSAKeyPair_fn, exportKeyPair_fn, importKeyPair_fn, _sign, _verify, _store, _namespace, _BaseStorage_instances, buildKey_fn, _baseUrl, _store2, _connectId, _eventEmitter, _Renown_instances, updateUser_fn, getCredential_fn;
13
- import { _ as __vitePreload } from "./main.DL5ahUYG.js";
14
- import { aW as getAugmentedNamespace, $ as getDefaultExportFromCjs, ah as connectConfig, ac as logger } from "./app-loader-q6UCPwaf.js";
15
- import { e as eventsExports, R as RENOWN_URL, g as getEnsInfo } from "./app-DRGUnpgP.js";
16
- import { d, c, b, f, a, r, s } from "./app-DRGUnpgP.js";
13
+ import { _ as __vitePreload } from "./main.b7W0Jdwz.js";
14
+ import { aW as getAugmentedNamespace, $ as getDefaultExportFromCjs, ah as connectConfig, ac as logger } from "./app-loader-UcJ4Us8H.js";
15
+ import { e as eventsExports, R as RENOWN_URL, g as getEnsInfo } from "./app-CrkKw3iM.js";
16
+ import { d, c, b, f, a, r, s } from "./app-CrkKw3iM.js";
17
17
  import "react/jsx-runtime";
18
18
  import "react";
19
19
  import "@powerhousedao/reactor-browser";
@@ -1,8 +1,8 @@
1
- const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/app-loader-q6UCPwaf.js","assets/main.DL5ahUYG.js","assets/app-loader-DC8DXqh9.css"])))=>i.map(i=>d[i]);
2
- import { _ as __vitePreload } from "./main.DL5ahUYG.js";
1
+ const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/app-loader-UcJ4Us8H.js","assets/main.b7W0Jdwz.js","assets/app-loader-DGByWxSG.css"])))=>i.map(i=>d[i]);
2
+ import { _ as __vitePreload } from "./main.b7W0Jdwz.js";
3
3
  import { createRoot } from "react-dom/client";
4
4
  async function renderApp(element) {
5
- const AppLoader = await __vitePreload(() => import("./app-loader-q6UCPwaf.js").then((n) => n.c9), true ? __vite__mapDeps([0,1,2]) : void 0);
5
+ const AppLoader = await __vitePreload(() => import("./app-loader-UcJ4Us8H.js").then((n) => n.c9), true ? __vite__mapDeps([0,1,2]) : void 0);
6
6
  createRoot(element).render(AppLoader.default);
7
7
  }
8
8
  const AppElement = document.getElementById("app");
@@ -1,9 +1,9 @@
1
1
  import { jsx, jsxs } from "react/jsx-runtime";
2
- import { t as twMerge, a5 as clsx, a6 as get, a7 as set, a8 as appendErrors, a9 as useFormContext, aa as Primitive, ab as cva, ac as FormProvider, ad as Controller, ae as Slot, af as useForm, ag as Root2, ah as List, ai as Trigger, aj as Content, ak as setModelName, al as setModelId, am as setModelDescription, an as setModelExtension, ao as setAuthorName, ap as setAuthorWebsite, aq as setStateSchema, ar as setInitialState, as as addModule, at as setModuleName, au as deleteModule, av as addOperation, aw as setOperationName, ax as setOperationSchema, ay as setOperationDescription, az as deleteOperation, aA as addOperationError, aB as deleteOperationError, aC as setOperationErrorName } from "./app-DRGUnpgP.js";
3
- import { ba as inspect$1, bb as GraphQLError$1, bc as getNamedType, bd as invariant$2, be as isInputObjectType, bf as isScalarType, bg as isObjectType, bh as isInterfaceType, bi as isUnionType, bj as isEnumType, bk as print, bl as isPrintableAsBlockString, bm as Kind, bn as astFromValue$1, bo as DEFAULT_DEPRECATION_REASON, bp as isSpecifiedDirective, bq as isSpecifiedScalarType, br as isIntrospectionType, bs as z, bt as GraphQLScalarType, Z as pascalCase, bu as parse, bv as visit, bw as buildASTSchema, bx as extendSchema, by as getNullableType, bz as isListType, $ as getDefaultExportFromCjs, bA as snakeCase, bB as constantCase, bC as isAbstractType, bD as BREAK, bE as GraphQLInputObjectType, bF as GraphQLList, bG as GraphQLEnumType, bH as GraphQLObjectType, bI as GraphQLInterfaceType, bJ as SchemaMetaFieldDef, bK as TypeMetaFieldDef, bL as TypeNameMetaFieldDef, bM as isCompositeType, bN as isOutputType, bO as isInputType, bP as GraphQLBoolean, bQ as assertAbstractType, bR as doTypesOverlap, bS as DirectiveLocation, bT as specifiedRules$1, bU as NoUnusedFragmentsRule$1, bV as ExecutableDefinitionsRule$1, bW as validate$2, bX as validateSchema, bY as GraphQLID, bZ as GraphQLString, b_ as GraphQLFloat, b$ as GraphQLInt, c0 as GraphQLDirective, c1 as GraphQLUnionType, c2 as isNonNullType, c3 as GraphQLNonNull, c4 as isNamedType, c5 as isLeafType, c6 as GraphQLSchema, c7 as buildSchema, c8 as sentenceCase, am as hashKey } from "./app-loader-q6UCPwaf.js";
2
+ import { t as twMerge, a5 as clsx, a6 as get, a7 as set, a8 as appendErrors, a9 as useFormContext, aa as Primitive, ab as cva, ac as FormProvider, ad as Controller, ae as Slot, af as useForm, ag as Root2, ah as List, ai as Trigger, aj as Content, ak as setModelName, al as setModelId, am as setModelDescription, an as setModelExtension, ao as setAuthorName, ap as setAuthorWebsite, aq as setStateSchema, ar as setInitialState, as as addModule, at as setModuleName, au as deleteModule, av as addOperation, aw as setOperationName, ax as setOperationSchema, ay as setOperationDescription, az as deleteOperation, aA as addOperationError, aB as deleteOperationError, aC as setOperationErrorName } from "./app-CrkKw3iM.js";
3
+ import { ba as inspect$1, bb as GraphQLError$1, bc as getNamedType, bd as invariant$2, be as isInputObjectType, bf as isScalarType, bg as isObjectType, bh as isInterfaceType, bi as isUnionType, bj as isEnumType, bk as print, bl as isPrintableAsBlockString, bm as Kind, bn as astFromValue$1, bo as DEFAULT_DEPRECATION_REASON, bp as isSpecifiedDirective, bq as isSpecifiedScalarType, br as isIntrospectionType, bs as z, bt as GraphQLScalarType, Z as pascalCase, bu as parse, bv as visit, bw as buildASTSchema, bx as extendSchema, by as getNullableType, bz as isListType, $ as getDefaultExportFromCjs, bA as snakeCase, bB as constantCase, bC as isAbstractType, bD as BREAK, bE as GraphQLInputObjectType, bF as GraphQLList, bG as GraphQLEnumType, bH as GraphQLObjectType, bI as GraphQLInterfaceType, bJ as SchemaMetaFieldDef, bK as TypeMetaFieldDef, bL as TypeNameMetaFieldDef, bM as isCompositeType, bN as isOutputType, bO as isInputType, bP as GraphQLBoolean, bQ as assertAbstractType, bR as doTypesOverlap, bS as DirectiveLocation, bT as specifiedRules$1, bU as NoUnusedFragmentsRule$1, bV as ExecutableDefinitionsRule$1, bW as validate$2, bX as validateSchema, bY as GraphQLID, bZ as GraphQLString, b_ as GraphQLFloat, b$ as GraphQLInt, c0 as GraphQLDirective, c1 as GraphQLUnionType, c2 as isNonNullType, c3 as GraphQLNonNull, c4 as isNamedType, c5 as isLeafType, c6 as GraphQLSchema, c7 as buildSchema, c8 as sentenceCase, am as hashKey } from "./app-loader-UcJ4Us8H.js";
4
4
  import * as React from "react";
5
5
  import React__default, { forwardRef, useRef, useCallback, useImperativeHandle, useEffect, useState, createContext, useContext, memo, useId, useMemo } from "react";
6
- import "./main.DL5ahUYG.js";
6
+ import "./main.b7W0Jdwz.js";
7
7
  import "@powerhousedao/reactor-browser";
8
8
  import "react-dom";
9
9
  import "@powerhousedao/reactor-browser/hooks/useUiNodesContext";
@@ -99,7 +99,7 @@ const __vitePreload = function preload(baseModule, deps, importerUrl) {
99
99
  return baseModule().catch(handlePreloadError);
100
100
  });
101
101
  };
102
- __vitePreload(() => import("./index-BsB1NVuP.js"), true ? [] : void 0);
102
+ __vitePreload(() => import("./index-DL-uHasR.js"), true ? [] : void 0);
103
103
  export {
104
104
  __vitePreload as _
105
105
  };
package/dist/index.html CHANGED
@@ -94,4 +94,4 @@
94
94
  "@powerhousedao/reactor-browser/utils/index": "/modules/@powerhousedao/reactor-browser/utils/index.js",
95
95
  "@powerhousedao/reactor-browser/utils/signature": "/modules/@powerhousedao/reactor-browser/utils/signature.js"
96
96
  }
97
- }</script><script type="module" crossorigin="" src="/assets/main.DL5ahUYG.js"></script></head><body><div id="app"></div></body></html>
97
+ }</script><script type="module" crossorigin="" src="/assets/main.b7W0Jdwz.js"></script></head><body><div id="app"></div></body></html>