@mearie/core 0.5.1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -360,15 +360,6 @@ const makeFieldKey = (selection, variables) => {
360
360
  return `${selection.name}@${args}`;
361
361
  };
362
362
  /**
363
- * Generates a unique key for tracking memoized denormalized results for structural sharing.
364
- * @internal
365
- * @param kind - The operation kind ('query', 'fragment', 'fragments').
366
- * @param name - The artifact name.
367
- * @param id - Serialized identifier (variables, entity key, etc.).
368
- * @returns A unique memo key.
369
- */
370
- const makeMemoKey = (kind, name, id) => `${kind}:${name}:${id}`;
371
- /**
372
363
  * Gets a unique key for tracking a field dependency.
373
364
  * @internal
374
365
  * @param storageKey Storage key (entity or root query key).
@@ -445,65 +436,48 @@ const isEqual = (a, b) => {
445
436
  }
446
437
  return false;
447
438
  };
439
+ const NormalizedKey = Symbol("mearie.normalized");
448
440
  /**
449
- * Recursively replaces a new value tree with the previous one wherever structurally equal,
450
- * preserving referential identity for unchanged subtrees.
451
- *
452
- * Returns `prev` (same reference) when the entire subtree is structurally equal.
441
+ * Marks a record as a normalized cache object so that {@link mergeFields}
442
+ * can distinguish it from opaque scalar values (e.g. JSON scalars).
443
+ * Only normalized records are deep-merged; unmarked objects are treated as
444
+ * atomic values and replaced entirely on write.
453
445
  * @internal
454
446
  */
455
- const replaceEqualDeep = (prev, next) => {
456
- if (prev === next) return prev;
457
- if (typeof prev !== typeof next || prev === null || next === null || typeof prev !== "object") return next;
458
- if (Array.isArray(prev)) {
459
- if (!Array.isArray(next)) return next;
460
- let allSame = prev.length === next.length;
461
- const result = [];
462
- for (const [i, item] of next.entries()) {
463
- const shared = i < prev.length ? replaceEqualDeep(prev[i], item) : item;
464
- result.push(shared);
465
- if (shared !== prev[i]) allSame = false;
466
- }
467
- return allSame ? prev : result;
468
- }
469
- if (Array.isArray(next)) return next;
470
- const prevObj = prev;
471
- const nextObj = next;
472
- const nextKeys = Object.keys(nextObj);
473
- const prevKeys = Object.keys(prevObj);
474
- let allSame = nextKeys.length === prevKeys.length;
475
- const result = {};
476
- for (const key of nextKeys) if (key in prevObj) {
477
- result[key] = replaceEqualDeep(prevObj[key], nextObj[key]);
478
- if (result[key] !== prevObj[key]) allSame = false;
479
- } else {
480
- result[key] = nextObj[key];
481
- allSame = false;
482
- }
483
- return allSame ? prev : result;
447
+ const markNormalized = (obj) => {
448
+ Object.defineProperty(obj, NormalizedKey, { value: true });
449
+ };
450
+ const isNormalizedRecord = (value) => {
451
+ return typeof value === "object" && value !== null && NormalizedKey in value;
484
452
  };
485
453
  /**
486
- * Deeply merges two values. Objects are recursively merged, arrays are element-wise merged,
487
- * entity links and primitives use last-write-wins.
454
+ * Deeply merges two values. When {@link deep} is false (default), only
455
+ * {@link markNormalized normalized} cache objects are recursively merged;
456
+ * unmarked plain objects (e.g. JSON scalars) are atomically replaced.
457
+ * When {@link deep} is true, all objects are recursively merged unconditionally.
458
+ * Arrays are element-wise merged, entity links and primitives use last-write-wins.
488
459
  * @internal
489
460
  */
490
- const mergeFieldValue = (existing, incoming) => {
461
+ const mergeFieldValue = (existing, incoming, deep) => {
491
462
  if (isNullish(existing) || isNullish(incoming)) return incoming;
492
463
  if (typeof existing !== "object" || typeof incoming !== "object") return incoming;
493
464
  if (isEntityLink(existing) || isEntityLink(incoming)) return incoming;
494
- if (Array.isArray(existing) && Array.isArray(incoming)) return incoming.map((item, i) => i < existing.length ? mergeFieldValue(existing[i], item) : item);
465
+ if (Array.isArray(existing) && Array.isArray(incoming)) return incoming.map((item, i) => i < existing.length ? mergeFieldValue(existing[i], item, deep) : item);
495
466
  if (Array.isArray(existing) || Array.isArray(incoming)) return incoming;
496
- mergeFields(existing, incoming);
467
+ if (!deep && !isNormalizedRecord(incoming)) return incoming;
468
+ mergeFields(existing, incoming, deep);
497
469
  return existing;
498
470
  };
499
471
  /**
500
- * Deeply merges source fields into target. Objects are recursively merged,
501
- * arrays are element-wise merged, entity links and primitives use last-write-wins.
472
+ * Deeply merges source fields into target.
473
+ * When {@link deep} is false (default), only {@link markNormalized normalized}
474
+ * objects are recursively merged; unmarked objects are atomically replaced.
475
+ * When {@link deep} is true, all objects are recursively merged unconditionally.
502
476
  * @internal
503
477
  */
504
- const mergeFields = (target, source) => {
478
+ const mergeFields = (target, source, deep) => {
505
479
  if (isNullish(source) || typeof source !== "object" || Array.isArray(source)) return;
506
- for (const key of Object.keys(source)) target[key] = mergeFieldValue(target[key], source[key]);
480
+ for (const key of Object.keys(source)) target[key] = mergeFieldValue(target[key], source[key], deep ?? false);
507
481
  };
508
482
  /**
509
483
  * Creates a FieldKey from a raw field name and optional arguments.
@@ -515,6 +489,48 @@ const mergeFields = (target, source) => {
515
489
  const makeFieldKeyFromArgs = (field, args) => {
516
490
  return `${field}@${args && Object.keys(args).length > 0 ? stringify(args) : "{}"}`;
517
491
  };
492
+ /**
493
+ * Type guard to check if a value is an array containing entity links.
494
+ * @internal
495
+ * @param value - Value to check.
496
+ * @returns True if the value is an array containing at least one entity link.
497
+ */
498
+ const isEntityLinkArray = (value) => {
499
+ if (!Array.isArray(value) || value.length === 0) return false;
500
+ for (const item of value) {
501
+ if (item === null || item === void 0) continue;
502
+ if (typeof item === "object" && !Array.isArray(item) && EntityLinkKey in item) return true;
503
+ if (Array.isArray(item) && isEntityLinkArray(item)) return true;
504
+ return false;
505
+ }
506
+ return false;
507
+ };
508
+ /**
509
+ * Compares two entity link arrays by their entity keys.
510
+ * @internal
511
+ * @param a - First entity link array.
512
+ * @param b - Second entity link array.
513
+ * @returns True if both arrays have the same entity keys at each position.
514
+ */
515
+ const isEntityLinkArrayEqual = (a, b) => {
516
+ if (a.length !== b.length) return false;
517
+ for (const [i, element] of a.entries()) if ((element?.[EntityLinkKey] ?? null) !== (b[i]?.[EntityLinkKey] ?? null)) return false;
518
+ return true;
519
+ };
520
+ /**
521
+ * Parses a dependency key into its storage key and field key components.
522
+ * @internal
523
+ * @param depKey - The dependency key to parse.
524
+ * @returns The storage key and field key.
525
+ */
526
+ const parseDependencyKey = (depKey) => {
527
+ const atIdx = depKey.indexOf("@");
528
+ const dotIdx = depKey.lastIndexOf(".", atIdx);
529
+ return {
530
+ storageKey: depKey.slice(0, dotIdx),
531
+ fieldKey: depKey.slice(dotIdx + 1)
532
+ };
533
+ };
518
534
 
519
535
  //#endregion
520
536
  //#region src/cache/normalize.ts
@@ -531,29 +547,31 @@ const normalize = (schemaMeta, selections, storage, data, variables, accessor) =
531
547
  if (keys.every((k) => k !== void 0 && k !== null)) return makeEntityKey(typename, keys);
532
548
  return null;
533
549
  };
534
- const normalizeField = (storageKey, selections, value) => {
550
+ const normalizeField = (storageKey, selections, value, parentType) => {
535
551
  if (isNullish(value)) return value;
536
- if (Array.isArray(value)) return value.map((item) => normalizeField(storageKey, selections, item));
552
+ if (Array.isArray(value)) return value.map((item) => normalizeField(storageKey, selections, item, parentType));
537
553
  const data = value;
538
- const typename = resolveTypename(selections, data);
554
+ const typename = resolveTypename(selections, data) ?? (parentType && schemaMeta.entities[parentType] ? parentType : void 0);
539
555
  const entityKey = resolveEntityKey(typename, data);
540
556
  if (entityKey) storageKey = entityKey;
541
557
  const fields = {};
542
558
  for (const selection of selections) if (selection.kind === "Field") {
543
559
  const fieldKey = makeFieldKey(selection, variables);
544
- const fieldValue = data[selection.alias ?? selection.name];
560
+ let fieldValue = data[selection.alias ?? selection.name];
561
+ if (selection.name === "__typename" && fieldValue === void 0 && typename) fieldValue = typename;
545
562
  if (storageKey !== null && selection.selections && typeof fieldValue === "object" && fieldValue !== null && !Array.isArray(fieldValue)) {
546
- const fieldTypename = resolveTypename(selection.selections, fieldValue);
563
+ const fieldTypename = resolveTypename(selection.selections, fieldValue) ?? (selection.type && schemaMeta.entities[selection.type] ? selection.type : void 0);
547
564
  if (fieldTypename && schemaMeta.entities[fieldTypename] && !resolveEntityKey(fieldTypename, fieldValue) && isEntityLink(storage[storageKey]?.[fieldKey])) continue;
548
565
  }
549
566
  const oldValue = storageKey === null ? void 0 : storage[storageKey]?.[fieldKey];
550
567
  if (storageKey !== null && (!selection.selections || isNullish(oldValue) || isNullish(fieldValue))) accessor?.(storageKey, fieldKey, oldValue, fieldValue);
551
- fields[fieldKey] = selection.selections ? normalizeField(null, selection.selections, fieldValue) : fieldValue;
568
+ fields[fieldKey] = selection.selections ? normalizeField(null, selection.selections, fieldValue, selection.type) : fieldValue;
552
569
  if (storageKey !== null && selection.selections && !isNullish(oldValue) && !isNullish(fieldValue) && !isEntityLink(fields[fieldKey]) && !isEqual(oldValue, fields[fieldKey])) accessor?.(storageKey, fieldKey, oldValue, fields[fieldKey]);
553
570
  } else if (selection.kind === "FragmentSpread" || selection.kind === "InlineFragment" && selection.on === typename) {
554
571
  const inner = normalizeField(storageKey, selection.selections, value);
555
572
  if (!isEntityLink(inner)) mergeFields(fields, inner);
556
573
  }
574
+ markNormalized(fields);
557
575
  if (entityKey) {
558
576
  const existing = storage[entityKey];
559
577
  if (existing) mergeFields(existing, fields);
@@ -576,59 +594,494 @@ const typenameFieldKey = makeFieldKey({
576
594
  name: "__typename",
577
595
  type: "String"
578
596
  }, {});
579
- const denormalize = (selections, storage, value, variables, accessor) => {
597
+ const denormalize = (selections, storage, value, variables, accessor, options) => {
580
598
  let partial = false;
581
- const denormalizeField = (storageKey, selections, value) => {
599
+ const denormalizeField = (storageKey, selections, value, path) => {
582
600
  if (isNullish(value)) return value;
583
- if (Array.isArray(value)) return value.map((item) => denormalizeField(storageKey, selections, item));
601
+ if (Array.isArray(value)) return value.map((item, i) => denormalizeField(storageKey, selections, item, [...path, i]));
584
602
  const data = value;
585
603
  if (isEntityLink(data)) {
586
604
  const entityKey = data[EntityLinkKey];
587
605
  const entity = storage[entityKey];
588
606
  if (!entity) {
589
- accessor?.(entityKey, typenameFieldKey);
607
+ accessor?.(entityKey, typenameFieldKey, path);
590
608
  partial = true;
591
609
  return null;
592
610
  }
593
- return denormalizeField(entityKey, selections, entity);
611
+ return denormalizeField(entityKey, selections, entity, path);
594
612
  }
595
613
  const fields = {};
596
614
  for (const selection of selections) if (selection.kind === "Field") {
597
615
  const fieldKey = makeFieldKey(selection, variables);
598
616
  const fieldValue = data[fieldKey];
599
- if (storageKey !== null) accessor?.(storageKey, fieldKey);
617
+ const fieldPath = [...path, selection.alias ?? selection.name];
618
+ if (storageKey !== null) accessor?.(storageKey, fieldKey, fieldPath, selection.selections);
600
619
  if (fieldValue === void 0) {
601
620
  partial = true;
602
621
  continue;
603
622
  }
604
623
  const name = selection.alias ?? selection.name;
605
- const value = selection.selections ? denormalizeField(null, selection.selections, fieldValue) : fieldValue;
606
- if (name in fields) mergeFields(fields, { [name]: value });
607
- else fields[name] = value;
624
+ const resolvedValue = selection.selections ? denormalizeField(null, selection.selections, fieldValue, fieldPath) : fieldValue;
625
+ if (name in fields) mergeFields(fields, { [name]: resolvedValue }, true);
626
+ else fields[name] = resolvedValue;
608
627
  } else if (selection.kind === "FragmentSpread") if (storageKey !== null && storageKey !== RootFieldKey) {
609
628
  fields[FragmentRefKey] = storageKey;
610
- if (selection.args) {
611
- const resolvedArgs = resolveArguments(selection.args, variables);
612
- const mergedVars = {
613
- ...variables,
614
- ...resolvedArgs
615
- };
616
- fields[FragmentVarsKey] = {
617
- ...fields[FragmentVarsKey],
618
- [selection.name]: mergedVars
619
- };
629
+ const merged = selection.args ? {
630
+ ...variables,
631
+ ...resolveArguments(selection.args, variables)
632
+ } : { ...variables };
633
+ fields[FragmentVarsKey] = {
634
+ ...fields[FragmentVarsKey],
635
+ [selection.name]: merged
636
+ };
637
+ if (accessor) {
638
+ if (denormalize(selection.selections, storage, { [EntityLinkKey]: storageKey }, variables, options?.trackFragmentDeps === false ? void 0 : accessor, options).partial) partial = true;
639
+ }
640
+ } else if (storageKey === RootFieldKey) {
641
+ fields[FragmentRefKey] = RootFieldKey;
642
+ const merged = selection.args ? {
643
+ ...variables,
644
+ ...resolveArguments(selection.args, variables)
645
+ } : { ...variables };
646
+ fields[FragmentVarsKey] = {
647
+ ...fields[FragmentVarsKey],
648
+ [selection.name]: merged
649
+ };
650
+ if (accessor) {
651
+ if (denormalize(selection.selections, storage, storage[RootFieldKey], variables, options?.trackFragmentDeps === false ? void 0 : accessor, options).partial) partial = true;
620
652
  }
621
- if (accessor) denormalize(selection.selections, storage, { [EntityLinkKey]: storageKey }, variables, accessor);
622
- } else mergeFields(fields, denormalizeField(storageKey, selection.selections, value));
623
- else if (selection.kind === "InlineFragment" && selection.on === data[typenameFieldKey]) mergeFields(fields, denormalizeField(storageKey, selection.selections, value));
653
+ } else mergeFields(fields, denormalizeField(storageKey, selection.selections, value, path), true);
654
+ else if (selection.kind === "InlineFragment" && selection.on === data[typenameFieldKey]) mergeFields(fields, denormalizeField(storageKey, selection.selections, value, path), true);
624
655
  return fields;
625
656
  };
626
657
  return {
627
- data: denormalizeField(RootFieldKey, selections, value),
658
+ data: denormalizeField(RootFieldKey, selections, value, []),
628
659
  partial
629
660
  };
630
661
  };
631
662
 
663
+ //#endregion
664
+ //#region src/cache/tree.ts
665
+ /**
666
+ * @internal
667
+ */
668
+ const buildEntryTree = (tuples, rootDepKey) => {
669
+ const root = {
670
+ depKey: rootDepKey ?? "__root",
671
+ children: /* @__PURE__ */ new Map()
672
+ };
673
+ for (const { storageKey, fieldKey, path, selections } of tuples) {
674
+ let current = root;
675
+ for (const element of path) {
676
+ const key = String(element);
677
+ let child = current.children.get(key);
678
+ if (!child) {
679
+ child = {
680
+ depKey: "",
681
+ children: /* @__PURE__ */ new Map()
682
+ };
683
+ current.children.set(key, child);
684
+ }
685
+ current = child;
686
+ }
687
+ current.depKey = makeDependencyKey(storageKey, fieldKey);
688
+ if (selections) current.selections = selections;
689
+ }
690
+ return root;
691
+ };
692
+ /**
693
+ * @internal
694
+ */
695
+ const findEntryTreeNode = (root, path) => {
696
+ let current = root;
697
+ for (const segment of path) {
698
+ if (!current) return void 0;
699
+ current = current.children.get(String(segment));
700
+ }
701
+ return current;
702
+ };
703
+ /**
704
+ * Removes all subscription entries for a given subscription from the subtree rooted at {@link node},
705
+ * and clears the node's children map. Both the subscription entries and the tree structure
706
+ * are cleaned up atomically to avoid stale references.
707
+ * @internal
708
+ */
709
+ const removeSubtreeEntries = (node, subscription, subscriptions) => {
710
+ const entries = subscriptions.get(node.depKey);
711
+ if (entries) {
712
+ for (const entry of entries) if (entry.subscription === subscription) {
713
+ entries.delete(entry);
714
+ break;
715
+ }
716
+ if (entries.size === 0) subscriptions.delete(node.depKey);
717
+ }
718
+ for (const child of node.children.values()) removeSubtreeEntries(child, subscription, subscriptions);
719
+ node.children.clear();
720
+ };
721
+ /**
722
+ * @internal
723
+ */
724
+ const snapshotFields = (node, storage) => {
725
+ const result = /* @__PURE__ */ new Map();
726
+ for (const [fieldName, child] of node.children) {
727
+ const { storageKey, fieldKey } = parseDependencyKey(child.depKey);
728
+ const fields = storage[storageKey];
729
+ if (fields) result.set(fieldName, fields[fieldKey]);
730
+ }
731
+ return result;
732
+ };
733
+ /**
734
+ * @internal
735
+ */
736
+ const partialDenormalize = (node, entity, basePath, rebuiltDepKeys, storage, subscriptions, subscription) => {
737
+ if (!node.selections) return {
738
+ data: null,
739
+ fieldValues: /* @__PURE__ */ new Map()
740
+ };
741
+ const tuples = [];
742
+ const { data } = denormalize(node.selections, storage, entity, subscription.variables, (storageKey, fieldKey, path, sels) => {
743
+ tuples.push({
744
+ storageKey,
745
+ fieldKey,
746
+ path: [...basePath, ...path],
747
+ selections: sels
748
+ });
749
+ }, { trackFragmentDeps: false });
750
+ node.children.clear();
751
+ const fieldValues = /* @__PURE__ */ new Map();
752
+ for (const tuple of tuples) {
753
+ const depKey = makeDependencyKey(tuple.storageKey, tuple.fieldKey);
754
+ rebuiltDepKeys.add(depKey);
755
+ const relativePath = tuple.path.slice(basePath.length);
756
+ let current = node;
757
+ for (const element of relativePath) {
758
+ const key = String(element);
759
+ let child = current.children.get(key);
760
+ if (!child) {
761
+ child = {
762
+ depKey: "",
763
+ children: /* @__PURE__ */ new Map()
764
+ };
765
+ current.children.set(key, child);
766
+ }
767
+ current = child;
768
+ }
769
+ current.depKey = depKey;
770
+ if (tuple.selections) current.selections = tuple.selections;
771
+ const entry = {
772
+ path: tuple.path,
773
+ subscription
774
+ };
775
+ let entrySet = subscriptions.get(depKey);
776
+ if (!entrySet) {
777
+ entrySet = /* @__PURE__ */ new Set();
778
+ subscriptions.set(depKey, entrySet);
779
+ }
780
+ entrySet.add(entry);
781
+ if (relativePath.length === 1) {
782
+ const fieldName = String(relativePath[0]);
783
+ if (data && typeof data === "object") fieldValues.set(fieldName, data[fieldName]);
784
+ }
785
+ }
786
+ return {
787
+ data,
788
+ fieldValues
789
+ };
790
+ };
791
+ const updateSubtreePaths = (node, basePath, newIndex, baseLen, subscription, subscriptions) => {
792
+ const entries = subscriptions.get(node.depKey);
793
+ if (entries) {
794
+ for (const entry of entries) if (entry.subscription === subscription && entry.path.length > baseLen) entry.path = [
795
+ ...basePath,
796
+ newIndex,
797
+ ...entry.path.slice(baseLen + 1)
798
+ ];
799
+ }
800
+ for (const child of node.children.values()) updateSubtreePaths(child, basePath, newIndex, baseLen, subscription, subscriptions);
801
+ };
802
+ /**
803
+ * @internal
804
+ */
805
+ const rebuildArrayIndices = (node, entry, subscriptions) => {
806
+ const basePath = entry.path;
807
+ const baseLen = basePath.length;
808
+ const children = [...node.children.entries()].toSorted(([a], [b]) => Number(a) - Number(b));
809
+ node.children.clear();
810
+ for (const [newIdx, child_] of children.entries()) {
811
+ const [, child] = child_;
812
+ const newKey = String(newIdx);
813
+ node.children.set(newKey, child);
814
+ updateSubtreePaths(child, basePath, newIdx, baseLen, entry.subscription, subscriptions);
815
+ }
816
+ };
817
+
818
+ //#endregion
819
+ //#region src/cache/diff.ts
820
+ /**
821
+ * Finds the common prefix and suffix boundaries between two key arrays.
822
+ * @internal
823
+ */
824
+ const findCommonBounds = (oldKeys, newKeys) => {
825
+ let start = 0;
826
+ while (start < oldKeys.length && start < newKeys.length && oldKeys[start] === newKeys[start]) start++;
827
+ let oldEnd = oldKeys.length;
828
+ let newEnd = newKeys.length;
829
+ while (oldEnd > start && newEnd > start && oldKeys[oldEnd - 1] === newKeys[newEnd - 1]) {
830
+ oldEnd--;
831
+ newEnd--;
832
+ }
833
+ return {
834
+ start,
835
+ oldEnd,
836
+ newEnd
837
+ };
838
+ };
839
+ /**
840
+ * Computes swap operations to reorder oldKeys into newKeys order using selection sort.
841
+ * @internal
842
+ */
843
+ const computeSwaps = (oldKeys, newKeys) => {
844
+ const working = [...oldKeys];
845
+ const swaps = [];
846
+ for (const [i, newKey] of newKeys.entries()) {
847
+ if (working[i] === newKey) continue;
848
+ const j = working.indexOf(newKey, i + 1);
849
+ if (j === -1) continue;
850
+ [working[i], working[j]] = [working[j], working[i]];
851
+ swaps.push({
852
+ i,
853
+ j
854
+ });
855
+ }
856
+ return swaps;
857
+ };
858
+
859
+ //#endregion
860
+ //#region src/cache/change.ts
861
+ /**
862
+ * @internal
863
+ */
864
+ const classifyChanges = (changedKeys) => {
865
+ const structural = [];
866
+ const scalar = [];
867
+ for (const [depKey, { oldValue, newValue }] of changedKeys) {
868
+ if (isEntityLink(oldValue) && isEntityLink(newValue) && oldValue[EntityLinkKey] === newValue[EntityLinkKey]) continue;
869
+ if (isEntityLinkArray(oldValue) && isEntityLinkArray(newValue) && isEntityLinkArrayEqual(oldValue, newValue)) continue;
870
+ if (isEntityLink(oldValue) || isEntityLink(newValue) || isEntityLinkArray(oldValue) || isEntityLinkArray(newValue)) structural.push({
871
+ depKey,
872
+ oldValue,
873
+ newValue
874
+ });
875
+ else scalar.push({
876
+ depKey,
877
+ newValue
878
+ });
879
+ }
880
+ return {
881
+ structural,
882
+ scalar
883
+ };
884
+ };
885
+ /**
886
+ * @internal
887
+ */
888
+ const processStructuralChange = (entry, node, oldValue, newValue, rebuiltDepKeys, storage, subscriptions) => {
889
+ const patches = [];
890
+ if (isEntityLink(oldValue) || isEntityLink(newValue)) {
891
+ if (isNullish(newValue)) {
892
+ removeSubtreeEntries(node, entry.subscription, subscriptions);
893
+ patches.push({
894
+ type: "set",
895
+ path: entry.path,
896
+ value: null
897
+ });
898
+ return patches;
899
+ }
900
+ if (isNullish(oldValue)) {
901
+ const entity = storage[newValue[EntityLinkKey]];
902
+ if (entity) {
903
+ const { data } = partialDenormalize(node, entity, entry.path, rebuiltDepKeys, storage, subscriptions, entry.subscription);
904
+ patches.push({
905
+ type: "set",
906
+ path: entry.path,
907
+ value: data
908
+ });
909
+ } else patches.push({
910
+ type: "set",
911
+ path: entry.path,
912
+ value: null
913
+ });
914
+ return patches;
915
+ }
916
+ const oldFields = snapshotFields(node, storage);
917
+ removeSubtreeEntries(node, entry.subscription, subscriptions);
918
+ const newEntity = storage[newValue[EntityLinkKey]];
919
+ if (!newEntity) {
920
+ patches.push({
921
+ type: "set",
922
+ path: entry.path,
923
+ value: null
924
+ });
925
+ return patches;
926
+ }
927
+ const { fieldValues: newFields } = partialDenormalize(node, newEntity, entry.path, rebuiltDepKeys, storage, subscriptions, entry.subscription);
928
+ for (const [fieldName, newVal] of newFields) if (!isEqual(oldFields.get(fieldName), newVal)) patches.push({
929
+ type: "set",
930
+ path: [...entry.path, fieldName],
931
+ value: newVal
932
+ });
933
+ for (const [fieldName] of oldFields) if (!newFields.has(fieldName)) patches.push({
934
+ type: "set",
935
+ path: [...entry.path, fieldName],
936
+ value: null
937
+ });
938
+ return patches;
939
+ }
940
+ if (isEntityLinkArray(oldValue) || isEntityLinkArray(newValue)) {
941
+ const oldArr = Array.isArray(oldValue) ? oldValue : [];
942
+ const newArr = Array.isArray(newValue) ? newValue : [];
943
+ const oldKeys = oldArr.map((item) => item !== null && item !== void 0 && typeof item === "object" && EntityLinkKey in item ? item[EntityLinkKey] : null);
944
+ const newKeys = newArr.map((item) => item !== null && item !== void 0 && typeof item === "object" && EntityLinkKey in item ? item[EntityLinkKey] : null);
945
+ const { start, oldEnd, newEnd } = findCommonBounds(oldKeys, newKeys);
946
+ const oldMiddle = oldKeys.slice(start, oldEnd);
947
+ const newMiddle = newKeys.slice(start, newEnd);
948
+ const newMiddleSet = new Set(newMiddle.filter((k) => k !== null));
949
+ const oldMiddleSet = new Set(oldMiddle.filter((k) => k !== null));
950
+ const removedIndices = [];
951
+ for (let i = oldMiddle.length - 1; i >= 0; i--) {
952
+ const key = oldMiddle[i];
953
+ if (key !== null && !newMiddleSet.has(key)) removedIndices.push(start + i);
954
+ }
955
+ for (const idx of removedIndices) {
956
+ const childKey = String(idx);
957
+ const child = node.children.get(childKey);
958
+ if (child) {
959
+ removeSubtreeEntries(child, entry.subscription, subscriptions);
960
+ node.children.delete(childKey);
961
+ }
962
+ patches.push({
963
+ type: "splice",
964
+ path: entry.path,
965
+ index: idx,
966
+ deleteCount: 1,
967
+ items: []
968
+ });
969
+ }
970
+ compactChildren(node);
971
+ const retainedOld = oldMiddle.filter((k) => k !== null && newMiddleSet.has(k));
972
+ const retainedNew = newMiddle.filter((k) => k !== null && oldMiddleSet.has(k));
973
+ if (retainedOld.length > 0) {
974
+ const swaps = computeSwaps(retainedOld, retainedNew);
975
+ for (const { i, j } of swaps) {
976
+ const absI = start + i;
977
+ const absJ = start + j;
978
+ patches.push({
979
+ type: "swap",
980
+ path: entry.path,
981
+ i: absI,
982
+ j: absJ
983
+ });
984
+ const childI = node.children.get(String(absI));
985
+ const childJ = node.children.get(String(absJ));
986
+ if (childI && childJ) {
987
+ node.children.set(String(absI), childJ);
988
+ node.children.set(String(absJ), childI);
989
+ }
990
+ }
991
+ }
992
+ const siblingSelections = findSiblingSelections(node);
993
+ const addedKeys = newMiddle.filter((k) => k !== null && !oldMiddleSet.has(k));
994
+ for (const key of addedKeys) {
995
+ const idx = start + newMiddle.indexOf(key);
996
+ shiftChildrenRight(node, idx);
997
+ const entity = storage[key];
998
+ const insertNode = {
999
+ depKey: "",
1000
+ children: /* @__PURE__ */ new Map(),
1001
+ ...siblingSelections && { selections: siblingSelections }
1002
+ };
1003
+ if (entity) {
1004
+ const { data } = partialDenormalize(insertNode, entity, [...entry.path, idx], rebuiltDepKeys, storage, subscriptions, entry.subscription);
1005
+ node.children.set(String(idx), insertNode);
1006
+ patches.push({
1007
+ type: "splice",
1008
+ path: entry.path,
1009
+ index: idx,
1010
+ deleteCount: 0,
1011
+ items: [data]
1012
+ });
1013
+ } else {
1014
+ node.children.set(String(idx), insertNode);
1015
+ patches.push({
1016
+ type: "splice",
1017
+ path: entry.path,
1018
+ index: idx,
1019
+ deleteCount: 0,
1020
+ items: [null]
1021
+ });
1022
+ }
1023
+ }
1024
+ rebuildArrayIndices(node, entry, subscriptions);
1025
+ return patches;
1026
+ }
1027
+ return patches;
1028
+ };
1029
+ const compactChildren = (node) => {
1030
+ const sorted = [...node.children.entries()].toSorted(([a], [b]) => Number(a) - Number(b));
1031
+ node.children.clear();
1032
+ for (const [i, element] of sorted.entries()) node.children.set(String(i), element[1]);
1033
+ };
1034
+ const findSiblingSelections = (node) => {
1035
+ for (const child of node.children.values()) if (child.selections) return child.selections;
1036
+ return node.selections;
1037
+ };
1038
+ const shiftChildrenRight = (node, fromIndex) => {
1039
+ const entries = [...node.children.entries()].toSorted(([a], [b]) => Number(a) - Number(b));
1040
+ node.children.clear();
1041
+ for (const [key, child] of entries) {
1042
+ const idx = Number(key);
1043
+ if (idx >= fromIndex) node.children.set(String(idx + 1), child);
1044
+ else node.children.set(key, child);
1045
+ }
1046
+ };
1047
+ /**
1048
+ * @internal
1049
+ */
1050
+ const generatePatches = (changedKeys, subscriptions, storage) => {
1051
+ const patchesBySubscription = /* @__PURE__ */ new Map();
1052
+ const rebuiltDepKeys = /* @__PURE__ */ new Set();
1053
+ const { structural, scalar } = classifyChanges(changedKeys);
1054
+ for (const { depKey, oldValue, newValue } of structural) {
1055
+ const entries = subscriptions.get(depKey);
1056
+ if (!entries) continue;
1057
+ for (const entry of entries) {
1058
+ const node = findEntryTreeNode(entry.subscription.entryTree, entry.path);
1059
+ if (!node) continue;
1060
+ const patches = processStructuralChange(entry, node, oldValue, newValue, rebuiltDepKeys, storage, subscriptions);
1061
+ if (patches.length > 0) {
1062
+ const existing = patchesBySubscription.get(entry.subscription) ?? [];
1063
+ existing.push(...patches);
1064
+ patchesBySubscription.set(entry.subscription, existing);
1065
+ }
1066
+ }
1067
+ }
1068
+ for (const { depKey, newValue } of scalar) {
1069
+ if (rebuiltDepKeys.has(depKey)) continue;
1070
+ const entries = subscriptions.get(depKey);
1071
+ if (!entries) continue;
1072
+ for (const entry of entries) {
1073
+ const existing = patchesBySubscription.get(entry.subscription) ?? [];
1074
+ existing.push({
1075
+ type: "set",
1076
+ path: entry.path,
1077
+ value: newValue
1078
+ });
1079
+ patchesBySubscription.set(entry.subscription, existing);
1080
+ }
1081
+ }
1082
+ return patchesBySubscription;
1083
+ };
1084
+
632
1085
  //#endregion
633
1086
  //#region src/cache/cache.ts
634
1087
  /**
@@ -639,7 +1092,6 @@ var Cache = class {
639
1092
  #schemaMeta;
640
1093
  #storage = { [RootFieldKey]: {} };
641
1094
  #subscriptions = /* @__PURE__ */ new Map();
642
- #memo = /* @__PURE__ */ new Map();
643
1095
  #stale = /* @__PURE__ */ new Set();
644
1096
  #optimisticKeys = [];
645
1097
  #optimisticLayers = /* @__PURE__ */ new Map();
@@ -674,22 +1126,35 @@ var Cache = class {
674
1126
  */
675
1127
  writeOptimistic(key, artifact, variables, data) {
676
1128
  const layerStorage = { [RootFieldKey]: {} };
677
- const dependencies = /* @__PURE__ */ new Set();
1129
+ const layerDependencies = /* @__PURE__ */ new Set();
678
1130
  normalize(this.#schemaMeta, artifact.selections, layerStorage, data, variables, (storageKey, fieldKey) => {
679
- dependencies.add(makeDependencyKey(storageKey, fieldKey));
1131
+ layerDependencies.add(makeDependencyKey(storageKey, fieldKey));
680
1132
  });
1133
+ const oldValues = /* @__PURE__ */ new Map();
1134
+ const currentView = this.#getStorageView();
1135
+ for (const depKey of layerDependencies) {
1136
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1137
+ oldValues.set(depKey, currentView[sk]?.[fk]);
1138
+ }
681
1139
  this.#optimisticKeys.push(key);
682
1140
  this.#optimisticLayers.set(key, {
683
1141
  storage: layerStorage,
684
- dependencies
1142
+ dependencies: layerDependencies
685
1143
  });
686
1144
  this.#storageView = null;
687
- const subscriptions = /* @__PURE__ */ new Set();
688
- for (const depKey of dependencies) {
689
- const ss = this.#subscriptions.get(depKey);
690
- if (ss) for (const s of ss) subscriptions.add(s);
1145
+ const newView = this.#getStorageView();
1146
+ const changedKeys = /* @__PURE__ */ new Map();
1147
+ for (const depKey of layerDependencies) {
1148
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1149
+ const newVal = newView[sk]?.[fk];
1150
+ const oldVal = oldValues.get(depKey);
1151
+ if (oldVal !== newVal) changedKeys.set(depKey, {
1152
+ oldValue: oldVal,
1153
+ newValue: newVal
1154
+ });
691
1155
  }
692
- for (const subscription of subscriptions) subscription.listener();
1156
+ const patchesBySubscription = generatePatches(changedKeys, this.#subscriptions, newView);
1157
+ for (const [subscription, patches] of patchesBySubscription) subscription.listener(patches);
693
1158
  }
694
1159
  /**
695
1160
  * Removes an optimistic layer and notifies affected subscribers.
@@ -699,42 +1164,71 @@ var Cache = class {
699
1164
  removeOptimistic(key) {
700
1165
  const layer = this.#optimisticLayers.get(key);
701
1166
  if (!layer) return;
1167
+ const currentView = this.#getStorageView();
1168
+ const oldValues = /* @__PURE__ */ new Map();
1169
+ for (const depKey of layer.dependencies) {
1170
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1171
+ oldValues.set(depKey, currentView[sk]?.[fk]);
1172
+ }
702
1173
  this.#optimisticLayers.delete(key);
703
1174
  this.#optimisticKeys = this.#optimisticKeys.filter((k) => k !== key);
704
1175
  this.#storageView = null;
705
- const subscriptions = /* @__PURE__ */ new Set();
1176
+ const newView = this.#getStorageView();
1177
+ const changedKeys = /* @__PURE__ */ new Map();
706
1178
  for (const depKey of layer.dependencies) {
707
- const ss = this.#subscriptions.get(depKey);
708
- if (ss) for (const s of ss) subscriptions.add(s);
1179
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1180
+ const newVal = newView[sk]?.[fk];
1181
+ const oldVal = oldValues.get(depKey);
1182
+ if (oldVal !== newVal) changedKeys.set(depKey, {
1183
+ oldValue: oldVal,
1184
+ newValue: newVal
1185
+ });
709
1186
  }
710
- for (const subscription of subscriptions) subscription.listener();
1187
+ const patchesBySubscription = generatePatches(changedKeys, this.#subscriptions, newView);
1188
+ for (const [subscription, patches] of patchesBySubscription) subscription.listener(patches);
711
1189
  }
712
1190
  /**
713
1191
  * Writes a query result to the cache, normalizing entities.
1192
+ * In addition to field-level stale clearing, this also clears entity-level stale entries
1193
+ * (e.g., `"User:1"`) when any field of that entity is written, because {@link invalidate}
1194
+ * supports entity-level invalidation without specifying a field.
714
1195
  * @param artifact - GraphQL document artifact.
715
1196
  * @param variables - Query variables.
716
1197
  * @param data - Query result data.
717
1198
  */
718
1199
  writeQuery(artifact, variables, data) {
719
- const dependencies = /* @__PURE__ */ new Set();
720
- const subscriptions = /* @__PURE__ */ new Set();
1200
+ const changedKeys = /* @__PURE__ */ new Map();
1201
+ const staleClearedKeys = /* @__PURE__ */ new Set();
721
1202
  const entityStaleCleared = /* @__PURE__ */ new Set();
722
1203
  normalize(this.#schemaMeta, artifact.selections, this.#storage, data, variables, (storageKey, fieldKey, oldValue, newValue) => {
723
1204
  const depKey = makeDependencyKey(storageKey, fieldKey);
724
- if (this.#stale.delete(depKey)) dependencies.add(depKey);
1205
+ if (this.#stale.delete(depKey)) staleClearedKeys.add(depKey);
725
1206
  if (!entityStaleCleared.has(storageKey) && this.#stale.delete(storageKey)) entityStaleCleared.add(storageKey);
726
- if (oldValue !== newValue) dependencies.add(depKey);
1207
+ if (oldValue !== newValue) changedKeys.set(depKey, {
1208
+ oldValue,
1209
+ newValue
1210
+ });
727
1211
  });
728
- for (const entityKey of entityStaleCleared) this.#collectSubscriptions(entityKey, void 0, subscriptions);
729
- for (const dependency of dependencies) {
730
- const ss = this.#subscriptions.get(dependency);
731
- if (ss) for (const s of ss) subscriptions.add(s);
1212
+ const patchesBySubscription = generatePatches(changedKeys, this.#subscriptions, this.#storage);
1213
+ for (const [subscription, patches] of patchesBySubscription) subscription.listener(patches);
1214
+ const staleOnlySubscriptions = /* @__PURE__ */ new Set();
1215
+ for (const depKey of staleClearedKeys) {
1216
+ if (changedKeys.has(depKey)) continue;
1217
+ const entries = this.#subscriptions.get(depKey);
1218
+ if (entries) {
1219
+ for (const entry of entries) if (!patchesBySubscription.has(entry.subscription)) staleOnlySubscriptions.add(entry.subscription);
1220
+ }
1221
+ }
1222
+ for (const entityKey of entityStaleCleared) {
1223
+ const prefix = `${entityKey}.`;
1224
+ for (const [depKey, entries] of this.#subscriptions) if (depKey.startsWith(prefix)) {
1225
+ for (const entry of entries) if (!patchesBySubscription.has(entry.subscription)) staleOnlySubscriptions.add(entry.subscription);
1226
+ }
732
1227
  }
733
- for (const subscription of subscriptions) subscription.listener();
1228
+ for (const subscription of staleOnlySubscriptions) subscription.listener(null);
734
1229
  }
735
1230
  /**
736
1231
  * Reads a query result from the cache, denormalizing entities if available.
737
- * Uses structural sharing to preserve referential identity for unchanged subtrees.
738
1232
  * @param artifact - GraphQL document artifact.
739
1233
  * @param variables - Query variables.
740
1234
  * @returns Denormalized query result or null if not found.
@@ -749,74 +1243,170 @@ var Cache = class {
749
1243
  data: null,
750
1244
  stale: false
751
1245
  };
752
- const key = makeMemoKey("query", artifact.name, stringify(variables));
753
- const prev = this.#memo.get(key);
754
- const result = prev === void 0 ? data : replaceEqualDeep(prev, data);
755
- this.#memo.set(key, result);
756
1246
  return {
757
- data: result,
1247
+ data,
758
1248
  stale
759
1249
  };
760
1250
  }
761
1251
  /**
762
- * Subscribes to cache invalidations for a specific query.
1252
+ * Subscribes to cache changes for a specific query.
763
1253
  * @param artifact - GraphQL document artifact.
764
1254
  * @param variables - Query variables.
765
- * @param listener - Callback function to invoke on cache invalidation.
766
- * @returns Unsubscribe function.
1255
+ * @param listener - Callback function to invoke on cache changes.
1256
+ * @returns Object containing initial data, stale status, unsubscribe function, and subscription.
767
1257
  */
768
1258
  subscribeQuery(artifact, variables, listener) {
769
- const dependencies = /* @__PURE__ */ new Set();
1259
+ let stale = false;
1260
+ const tuples = [];
770
1261
  const storageView = this.#getStorageView();
771
- denormalize(artifact.selections, storageView, storageView[RootFieldKey], variables, (storageKey, fieldKey) => {
772
- const dependencyKey = makeDependencyKey(storageKey, fieldKey);
773
- dependencies.add(dependencyKey);
774
- });
775
- return this.#subscribe(dependencies, listener);
1262
+ const { data, partial } = denormalize(artifact.selections, storageView, storageView[RootFieldKey], variables, (storageKey, fieldKey, path, selections) => {
1263
+ tuples.push({
1264
+ storageKey,
1265
+ fieldKey,
1266
+ path,
1267
+ selections
1268
+ });
1269
+ if (this.#stale.has(storageKey) || this.#stale.has(makeDependencyKey(storageKey, fieldKey))) stale = true;
1270
+ }, { trackFragmentDeps: false });
1271
+ const entryTree = buildEntryTree(tuples);
1272
+ const subscription = {
1273
+ listener,
1274
+ selections: artifact.selections,
1275
+ variables,
1276
+ entryTree
1277
+ };
1278
+ for (const tuple of tuples) {
1279
+ const depKey = makeDependencyKey(tuple.storageKey, tuple.fieldKey);
1280
+ const entry = {
1281
+ path: tuple.path,
1282
+ subscription
1283
+ };
1284
+ let entrySet = this.#subscriptions.get(depKey);
1285
+ if (!entrySet) {
1286
+ entrySet = /* @__PURE__ */ new Set();
1287
+ this.#subscriptions.set(depKey, entrySet);
1288
+ }
1289
+ entrySet.add(entry);
1290
+ }
1291
+ const unsubscribe = () => {
1292
+ this.#removeSubscriptionFromTree(entryTree, subscription);
1293
+ };
1294
+ return {
1295
+ data: partial ? null : data,
1296
+ stale,
1297
+ unsubscribe,
1298
+ subscription
1299
+ };
776
1300
  }
777
1301
  /**
778
1302
  * Reads a fragment from the cache for a specific entity.
779
- * Uses structural sharing to preserve referential identity for unchanged subtrees.
780
1303
  * @param artifact - GraphQL fragment artifact.
781
1304
  * @param fragmentRef - Fragment reference containing entity key.
782
1305
  * @returns Denormalized fragment data or null if not found or invalid.
783
1306
  */
784
1307
  readFragment(artifact, fragmentRef) {
785
- const entityKey = fragmentRef[FragmentRefKey];
1308
+ const storageKey = fragmentRef[FragmentRefKey];
786
1309
  const fragmentVars = getFragmentVars(fragmentRef, artifact.name);
787
1310
  const storageView = this.#getStorageView();
788
- if (!storageView[entityKey]) return {
1311
+ let stale = false;
1312
+ const value = storageView[storageKey];
1313
+ if (!value) return {
789
1314
  data: null,
790
1315
  stale: false
791
1316
  };
792
- let stale = false;
793
- const { data, partial } = denormalize(artifact.selections, storageView, { [EntityLinkKey]: entityKey }, fragmentVars, (storageKey, fieldKey) => {
794
- if (this.#stale.has(storageKey) || this.#stale.has(makeDependencyKey(storageKey, fieldKey))) stale = true;
1317
+ const { data, partial } = denormalize(artifact.selections, storageView, storageKey === RootFieldKey ? value : { [EntityLinkKey]: storageKey }, fragmentVars, (sk, fieldKey) => {
1318
+ if (this.#stale.has(sk) || this.#stale.has(makeDependencyKey(sk, fieldKey))) stale = true;
795
1319
  });
796
1320
  if (partial) return {
797
1321
  data: null,
798
1322
  stale: false
799
1323
  };
800
- const argsId = Object.keys(fragmentVars).length > 0 ? entityKey + stringify(fragmentVars) : entityKey;
801
- const key = makeMemoKey("fragment", artifact.name, argsId);
802
- const prev = this.#memo.get(key);
803
- const result = prev === void 0 ? data : replaceEqualDeep(prev, data);
804
- this.#memo.set(key, result);
805
1324
  return {
806
- data: result,
1325
+ data,
807
1326
  stale
808
1327
  };
809
1328
  }
1329
+ /**
1330
+ * Subscribes to cache changes for a specific fragment.
1331
+ * @param artifact - GraphQL fragment artifact.
1332
+ * @param fragmentRef - Fragment reference containing entity key.
1333
+ * @param listener - Callback function to invoke on cache changes.
1334
+ * @returns Object containing initial data, stale status, unsubscribe function, and subscription.
1335
+ */
810
1336
  subscribeFragment(artifact, fragmentRef, listener) {
811
- const entityKey = fragmentRef[FragmentRefKey];
1337
+ const storageKey = fragmentRef[FragmentRefKey];
812
1338
  const fragmentVars = getFragmentVars(fragmentRef, artifact.name);
813
- const dependencies = /* @__PURE__ */ new Set();
814
1339
  const storageView = this.#getStorageView();
815
- denormalize(artifact.selections, storageView, { [EntityLinkKey]: entityKey }, fragmentVars, (storageKey, fieldKey) => {
816
- const dependencyKey = makeDependencyKey(storageKey, fieldKey);
817
- dependencies.add(dependencyKey);
818
- });
819
- return this.#subscribe(dependencies, listener);
1340
+ const value = storageKey === RootFieldKey ? storageView[RootFieldKey] : storageView[storageKey];
1341
+ if (!value) {
1342
+ const entryTree = buildEntryTree([]);
1343
+ return {
1344
+ data: null,
1345
+ stale: false,
1346
+ unsubscribe: () => {},
1347
+ subscription: {
1348
+ listener,
1349
+ selections: artifact.selections,
1350
+ variables: fragmentVars,
1351
+ entryTree
1352
+ }
1353
+ };
1354
+ }
1355
+ let stale = false;
1356
+ const tuples = [];
1357
+ const denormalizeValue = storageKey === RootFieldKey ? value : { [EntityLinkKey]: storageKey };
1358
+ const { data, partial } = denormalize(artifact.selections, storageView, denormalizeValue, fragmentVars, (sk, fieldKey, path, selections) => {
1359
+ tuples.push({
1360
+ storageKey: sk,
1361
+ fieldKey,
1362
+ path,
1363
+ selections
1364
+ });
1365
+ if (this.#stale.has(sk) || this.#stale.has(makeDependencyKey(sk, fieldKey))) stale = true;
1366
+ }, { trackFragmentDeps: false });
1367
+ if (partial) {
1368
+ const entryTree = buildEntryTree([]);
1369
+ return {
1370
+ data: null,
1371
+ stale: false,
1372
+ unsubscribe: () => {},
1373
+ subscription: {
1374
+ listener,
1375
+ selections: artifact.selections,
1376
+ variables: fragmentVars,
1377
+ entryTree
1378
+ }
1379
+ };
1380
+ }
1381
+ const entryTree = buildEntryTree(tuples, storageKey === RootFieldKey ? void 0 : storageKey);
1382
+ const subscription = {
1383
+ listener,
1384
+ selections: artifact.selections,
1385
+ variables: fragmentVars,
1386
+ entryTree
1387
+ };
1388
+ for (const tuple of tuples) {
1389
+ const depKey = makeDependencyKey(tuple.storageKey, tuple.fieldKey);
1390
+ const entry = {
1391
+ path: tuple.path,
1392
+ subscription
1393
+ };
1394
+ let entrySet = this.#subscriptions.get(depKey);
1395
+ if (!entrySet) {
1396
+ entrySet = /* @__PURE__ */ new Set();
1397
+ this.#subscriptions.set(depKey, entrySet);
1398
+ }
1399
+ entrySet.add(entry);
1400
+ }
1401
+ const unsubscribe = () => {
1402
+ this.#removeSubscriptionFromTree(entryTree, subscription);
1403
+ };
1404
+ return {
1405
+ data: partial ? null : data,
1406
+ stale,
1407
+ unsubscribe,
1408
+ subscription
1409
+ };
820
1410
  }
821
1411
  readFragments(artifact, fragmentRefs) {
822
1412
  const results = [];
@@ -830,42 +1420,35 @@ var Cache = class {
830
1420
  if (result.stale) stale = true;
831
1421
  results.push(result.data);
832
1422
  }
833
- const entityKeys = fragmentRefs.map((ref) => ref[FragmentRefKey]);
834
- const key = makeMemoKey("fragments", artifact.name, entityKeys.join(","));
835
- const prev = this.#memo.get(key);
836
- const result = prev === void 0 ? results : replaceEqualDeep(prev, results);
837
- this.#memo.set(key, result);
838
1423
  return {
839
- data: result,
1424
+ data: results,
840
1425
  stale
841
1426
  };
842
1427
  }
843
1428
  subscribeFragments(artifact, fragmentRefs, listener) {
844
- const dependencies = /* @__PURE__ */ new Set();
845
- const storageView = this.#getStorageView();
1429
+ const unsubscribes = [];
846
1430
  for (const ref of fragmentRefs) {
847
- const entityKey = ref[FragmentRefKey];
848
- const fragmentVars = getFragmentVars(ref, artifact.name);
849
- denormalize(artifact.selections, storageView, { [EntityLinkKey]: entityKey }, fragmentVars, (storageKey, fieldKey) => {
850
- dependencies.add(makeDependencyKey(storageKey, fieldKey));
851
- });
1431
+ const { unsubscribe } = this.subscribeFragment(artifact, ref, listener);
1432
+ unsubscribes.push(unsubscribe);
852
1433
  }
853
- return this.#subscribe(dependencies, listener);
1434
+ return () => {
1435
+ for (const unsub of unsubscribes) unsub();
1436
+ };
854
1437
  }
855
1438
  /**
856
1439
  * Invalidates one or more cache entries and notifies affected subscribers.
857
1440
  * @param targets - Cache entries to invalidate.
858
1441
  */
859
1442
  invalidate(...targets) {
860
- const subscriptions = /* @__PURE__ */ new Set();
1443
+ const affectedSubscriptions = /* @__PURE__ */ new Set();
861
1444
  for (const target of targets) if (target.__typename === "Query") if ("$field" in target) {
862
1445
  const fieldKey = makeFieldKeyFromArgs(target.$field, target.$args);
863
1446
  const depKey = makeDependencyKey(RootFieldKey, fieldKey);
864
1447
  this.#stale.add(depKey);
865
- this.#collectSubscriptions(RootFieldKey, fieldKey, subscriptions);
1448
+ this.#collectSubscriptions(RootFieldKey, fieldKey, affectedSubscriptions);
866
1449
  } else {
867
1450
  this.#stale.add(RootFieldKey);
868
- this.#collectSubscriptions(RootFieldKey, void 0, subscriptions);
1451
+ this.#collectSubscriptions(RootFieldKey, void 0, affectedSubscriptions);
869
1452
  }
870
1453
  else {
871
1454
  const keyFields = this.#schemaMeta.entities[target.__typename]?.keyFields;
@@ -875,10 +1458,10 @@ var Cache = class {
875
1458
  if ("$field" in target) {
876
1459
  const fieldKey = makeFieldKeyFromArgs(target.$field, target.$args);
877
1460
  this.#stale.add(makeDependencyKey(entityKey, fieldKey));
878
- this.#collectSubscriptions(entityKey, fieldKey, subscriptions);
1461
+ this.#collectSubscriptions(entityKey, fieldKey, affectedSubscriptions);
879
1462
  } else {
880
1463
  this.#stale.add(entityKey);
881
- this.#collectSubscriptions(entityKey, void 0, subscriptions);
1464
+ this.#collectSubscriptions(entityKey, void 0, affectedSubscriptions);
882
1465
  }
883
1466
  } else {
884
1467
  const prefix = `${target.__typename}:`;
@@ -887,15 +1470,30 @@ var Cache = class {
887
1470
  if ("$field" in target) {
888
1471
  const fieldKey = makeFieldKeyFromArgs(target.$field, target.$args);
889
1472
  this.#stale.add(makeDependencyKey(entityKey, fieldKey));
890
- this.#collectSubscriptions(entityKey, fieldKey, subscriptions);
1473
+ this.#collectSubscriptions(entityKey, fieldKey, affectedSubscriptions);
891
1474
  } else {
892
1475
  this.#stale.add(entityKey);
893
- this.#collectSubscriptions(entityKey, void 0, subscriptions);
1476
+ this.#collectSubscriptions(entityKey, void 0, affectedSubscriptions);
894
1477
  }
895
1478
  }
896
1479
  }
897
1480
  }
898
- for (const subscription of subscriptions) subscription.listener();
1481
+ for (const subscription of affectedSubscriptions) subscription.listener(null);
1482
+ }
1483
+ /**
1484
+ * Checks if a subscription has stale data.
1485
+ * @internal
1486
+ */
1487
+ isStale(subscription) {
1488
+ const check = (node) => {
1489
+ if (node.depKey.includes("@")) {
1490
+ const { storageKey } = parseDependencyKey(node.depKey);
1491
+ if (this.#stale.has(storageKey) || this.#stale.has(node.depKey)) return true;
1492
+ }
1493
+ for (const child of node.children.values()) if (check(child)) return true;
1494
+ return false;
1495
+ };
1496
+ return check(subscription.entryTree);
899
1497
  }
900
1498
  #hasKeyFields(target, keyFields) {
901
1499
  return keyFields.every((f) => f in target);
@@ -903,48 +1501,43 @@ var Cache = class {
903
1501
  #collectSubscriptions(storageKey, fieldKey, out) {
904
1502
  if (fieldKey === void 0) {
905
1503
  const prefix = `${storageKey}.`;
906
- for (const [depKey, ss] of this.#subscriptions) if (depKey.startsWith(prefix)) for (const s of ss) out.add(s);
1504
+ for (const [depKey, entries] of this.#subscriptions) if (depKey.startsWith(prefix)) for (const entry of entries) out.add(entry.subscription);
907
1505
  } else {
908
1506
  const depKey = makeDependencyKey(storageKey, fieldKey);
909
- const ss = this.#subscriptions.get(depKey);
910
- if (ss) for (const s of ss) out.add(s);
1507
+ const entries = this.#subscriptions.get(depKey);
1508
+ if (entries) for (const entry of entries) out.add(entry.subscription);
911
1509
  }
912
1510
  }
913
- #subscribe(dependencies, listener) {
914
- const subscription = { listener };
915
- for (const dependency of dependencies) {
916
- const subscriptions = this.#subscriptions.get(dependency) ?? /* @__PURE__ */ new Set();
917
- subscriptions.add(subscription);
918
- this.#subscriptions.set(dependency, subscriptions);
919
- }
920
- return () => {
921
- for (const dependency of dependencies) {
922
- const subscriptions = this.#subscriptions.get(dependency);
923
- subscriptions?.delete(subscription);
924
- if (subscriptions?.size === 0) this.#subscriptions.delete(dependency);
1511
+ #removeSubscriptionFromTree(node, subscription) {
1512
+ const entries = this.#subscriptions.get(node.depKey);
1513
+ if (entries) {
1514
+ for (const entry of entries) if (entry.subscription === subscription) {
1515
+ entries.delete(entry);
1516
+ break;
925
1517
  }
926
- };
1518
+ if (entries.size === 0) this.#subscriptions.delete(node.depKey);
1519
+ }
1520
+ for (const child of node.children.values()) this.#removeSubscriptionFromTree(child, subscription);
1521
+ }
1522
+ #parseDepKey(depKey) {
1523
+ return parseDependencyKey(depKey);
927
1524
  }
928
1525
  /**
929
- * Extracts a serializable snapshot of the cache storage and structural sharing state.
1526
+ * Extracts a serializable snapshot of the cache storage.
930
1527
  * Optimistic layers are excluded because they represent transient in-flight state.
931
1528
  */
932
1529
  extract() {
933
- return {
934
- storage: structuredClone(this.#storage),
935
- memo: Object.fromEntries(this.#memo)
936
- };
1530
+ return { storage: structuredClone(this.#storage) };
937
1531
  }
938
1532
  /**
939
1533
  * Hydrates the cache with a previously extracted snapshot.
940
1534
  */
941
1535
  hydrate(snapshot) {
942
- const { storage, memo } = snapshot;
1536
+ const { storage } = snapshot;
943
1537
  for (const [key, fields] of Object.entries(storage)) this.#storage[key] = {
944
1538
  ...this.#storage[key],
945
1539
  ...fields
946
1540
  };
947
- for (const [key, value] of Object.entries(memo)) this.#memo.set(key, value);
948
1541
  this.#storageView = null;
949
1542
  }
950
1543
  /**
@@ -953,7 +1546,6 @@ var Cache = class {
953
1546
  clear() {
954
1547
  this.#storage = { [RootFieldKey]: {} };
955
1548
  this.#subscriptions.clear();
956
- this.#memo.clear();
957
1549
  this.#stale.clear();
958
1550
  this.#optimisticKeys = [];
959
1551
  this.#optimisticLayers.clear();
@@ -989,6 +1581,9 @@ const cacheExchange = (options = {}) => {
989
1581
  clear: () => cache.clear()
990
1582
  },
991
1583
  io: (ops$) => {
1584
+ const subscriptionHasData = /* @__PURE__ */ new Map();
1585
+ const resubscribe$ = require_make.makeSubject();
1586
+ const refetch$ = require_make.makeSubject();
992
1587
  const fragment$ = require_make.pipe(ops$, require_make.filter((op) => op.variant === "request" && op.artifact.kind === "fragment"), require_make.mergeMap((op) => {
993
1588
  const fragmentRef = op.metadata?.fragment?.ref;
994
1589
  if (!fragmentRef) return require_make.fromValue({
@@ -996,77 +1591,152 @@ const cacheExchange = (options = {}) => {
996
1591
  errors: [new ExchangeError("Fragment operation missing fragment.ref in metadata. This usually happens when the wrong fragment reference was passed.", { exchangeName: "cache" })]
997
1592
  });
998
1593
  if (isFragmentRefArray(fragmentRef)) {
999
- const trigger = require_make.makeSubject();
1000
- const teardown$ = require_make.pipe(ops$, require_make.filter((operation) => operation.variant === "teardown" && operation.key === op.key), require_make.tap(() => trigger.complete()));
1001
- return require_make.pipe(require_make.merge(require_make.fromValue(void 0), trigger.source), require_make.switchMap(() => require_make.fromSubscription(() => cache.readFragments(op.artifact, fragmentRef), () => cache.subscribeFragments(op.artifact, fragmentRef, async () => {
1002
- await Promise.resolve();
1003
- trigger.next();
1004
- }))), require_make.takeUntil(teardown$), require_make.map(({ data, stale }) => ({
1594
+ const results = require_make.makeSubject();
1595
+ const unsubscribes = [];
1596
+ const fragmentSubscriptions = [];
1597
+ for (const [index, ref] of fragmentRef.entries()) {
1598
+ const patchListener = (patches) => {
1599
+ if (patches) {
1600
+ const indexedPatches = patches.map((patch) => ({
1601
+ ...patch,
1602
+ path: [index, ...patch.path]
1603
+ }));
1604
+ results.next({
1605
+ operation: op,
1606
+ metadata: { cache: { patches: indexedPatches } },
1607
+ errors: []
1608
+ });
1609
+ } else {
1610
+ const sub = fragmentSubscriptions[index];
1611
+ if (sub && cache.isStale(sub)) {
1612
+ const { data, stale } = cache.readFragments(op.artifact, fragmentRef);
1613
+ if (data !== null) results.next({
1614
+ operation: op,
1615
+ data,
1616
+ ...stale && { metadata: { cache: { stale: true } } },
1617
+ errors: []
1618
+ });
1619
+ }
1620
+ }
1621
+ };
1622
+ const { unsubscribe, subscription } = cache.subscribeFragment(op.artifact, ref, patchListener);
1623
+ unsubscribes.push(unsubscribe);
1624
+ fragmentSubscriptions.push(subscription);
1625
+ }
1626
+ const { data: initialData, stale: initialStale } = cache.readFragments(op.artifact, fragmentRef);
1627
+ const teardown$ = require_make.pipe(ops$, require_make.filter((operation) => operation.variant === "teardown" && operation.key === op.key), require_make.tap(() => {
1628
+ for (const unsub of unsubscribes) unsub();
1629
+ results.complete();
1630
+ }));
1631
+ return require_make.pipe(require_make.merge(require_make.fromValue({
1005
1632
  operation: op,
1006
- data,
1007
- ...stale && { metadata: { cache: { stale: true } } },
1633
+ data: initialData,
1634
+ ...initialStale && { metadata: { cache: { stale: true } } },
1008
1635
  errors: []
1009
- })));
1636
+ }), results.source), require_make.takeUntil(teardown$));
1010
1637
  }
1011
1638
  if (!isFragmentRef(fragmentRef)) return require_make.fromValue({
1012
1639
  operation: op,
1013
1640
  data: fragmentRef,
1014
1641
  errors: []
1015
1642
  });
1016
- const trigger = require_make.makeSubject();
1017
- const teardown$ = require_make.pipe(ops$, require_make.filter((operation) => operation.variant === "teardown" && operation.key === op.key), require_make.tap(() => trigger.complete()));
1018
- return require_make.pipe(require_make.merge(require_make.fromValue(void 0), trigger.source), require_make.switchMap(() => require_make.fromSubscription(() => cache.readFragment(op.artifact, fragmentRef), () => cache.subscribeFragment(op.artifact, fragmentRef, async () => {
1019
- await Promise.resolve();
1020
- trigger.next();
1021
- }))), require_make.takeUntil(teardown$), require_make.map(({ data, stale }) => ({
1643
+ const results = require_make.makeSubject();
1644
+ let currentUnsubscribe = null;
1645
+ let currentSubscription = null;
1646
+ const patchListener = (patches) => {
1647
+ if (patches) results.next({
1648
+ operation: op,
1649
+ metadata: { cache: { patches } },
1650
+ errors: []
1651
+ });
1652
+ else if (currentSubscription) {
1653
+ if (cache.isStale(currentSubscription)) {
1654
+ const { data: staleData } = cache.readFragment(op.artifact, fragmentRef);
1655
+ if (staleData !== null) results.next({
1656
+ operation: op,
1657
+ data: staleData,
1658
+ metadata: { cache: { stale: true } },
1659
+ errors: []
1660
+ });
1661
+ }
1662
+ }
1663
+ };
1664
+ const { data, stale, unsubscribe, subscription } = cache.subscribeFragment(op.artifact, fragmentRef, patchListener);
1665
+ currentUnsubscribe = unsubscribe;
1666
+ currentSubscription = subscription;
1667
+ const teardown$ = require_make.pipe(ops$, require_make.filter((operation) => operation.variant === "teardown" && operation.key === op.key), require_make.tap(() => {
1668
+ if (currentUnsubscribe) currentUnsubscribe();
1669
+ results.complete();
1670
+ }));
1671
+ return require_make.pipe(require_make.merge(data === null ? empty() : require_make.fromValue({
1022
1672
  operation: op,
1023
1673
  data,
1024
1674
  ...stale && { metadata: { cache: { stale: true } } },
1025
1675
  errors: []
1026
- })));
1676
+ }), results.source), require_make.takeUntil(teardown$));
1027
1677
  }));
1028
1678
  const nonCache$ = require_make.pipe(ops$, require_make.filter((op) => op.variant === "request" && (op.artifact.kind === "mutation" || op.artifact.kind === "subscription" || op.artifact.kind === "query" && fetchPolicy === "network-only")), require_make.tap((op) => {
1029
1679
  if (op.artifact.kind === "mutation" && op.metadata?.cache?.optimisticResponse) cache.writeOptimistic(op.key, op.artifact, op.variables, op.metadata.cache.optimisticResponse);
1030
1680
  }));
1031
1681
  const query$ = require_make.pipe(ops$, require_make.filter((op) => op.variant === "request" && op.artifact.kind === "query" && fetchPolicy !== "network-only"), require_make.share());
1032
- const refetch$ = require_make.makeSubject();
1033
1682
  return require_make.merge(fragment$, require_make.pipe(query$, require_make.mergeMap((op) => {
1034
- const trigger = require_make.makeSubject();
1035
- let hasData = false;
1036
- const teardown$ = require_make.pipe(ops$, require_make.filter((operation) => operation.variant === "teardown" && operation.key === op.key), require_make.tap(() => trigger.complete()));
1037
- return require_make.pipe(require_make.merge(require_make.fromValue(void 0), trigger.source), require_make.switchMap(() => require_make.fromSubscription(() => cache.readQuery(op.artifact, op.variables), () => cache.subscribeQuery(op.artifact, op.variables, async () => {
1038
- await Promise.resolve();
1039
- trigger.next();
1040
- }))), require_make.takeUntil(teardown$), require_make.mergeMap(({ data, stale }) => {
1041
- if (data !== null && !stale) {
1042
- hasData = true;
1043
- return require_make.fromValue({
1044
- operation: op,
1045
- data,
1046
- errors: []
1047
- });
1048
- }
1049
- if (data !== null && stale) {
1050
- hasData = true;
1051
- refetch$.next(op);
1052
- return require_make.fromValue({
1053
- operation: op,
1054
- data,
1055
- metadata: { cache: { stale: true } },
1056
- errors: []
1057
- });
1058
- }
1059
- if (hasData) {
1060
- refetch$.next(op);
1061
- return empty();
1062
- }
1063
- if (fetchPolicy === "cache-only") return require_make.fromValue({
1064
- operation: op,
1065
- data: null,
1066
- errors: []
1067
- });
1683
+ const results = require_make.makeSubject();
1684
+ let currentUnsubscribe = null;
1685
+ let currentSubscription = null;
1686
+ let initialized = false;
1687
+ const doSubscribe = () => {
1688
+ if (currentUnsubscribe) currentUnsubscribe();
1689
+ const patchListener = (patches) => {
1690
+ if (patches) {
1691
+ if (!initialized) return;
1692
+ results.next({
1693
+ operation: op,
1694
+ metadata: { cache: { patches } },
1695
+ errors: []
1696
+ });
1697
+ } else if (currentSubscription) {
1698
+ if (cache.isStale(currentSubscription)) {
1699
+ const { data: staleData } = cache.readQuery(op.artifact, op.variables);
1700
+ if (staleData !== null) results.next({
1701
+ operation: op,
1702
+ data: staleData,
1703
+ metadata: { cache: { stale: true } },
1704
+ errors: []
1705
+ });
1706
+ refetch$.next(op);
1707
+ }
1708
+ }
1709
+ };
1710
+ const result = cache.subscribeQuery(op.artifact, op.variables, patchListener);
1711
+ currentUnsubscribe = result.unsubscribe;
1712
+ currentSubscription = result.subscription;
1713
+ return result;
1714
+ };
1715
+ const { data, stale } = doSubscribe();
1716
+ subscriptionHasData.set(op.key, data !== null);
1717
+ if (data !== null) initialized = true;
1718
+ const teardown$ = require_make.pipe(ops$, require_make.filter((o) => o.variant === "teardown" && o.key === op.key), require_make.tap(() => {
1719
+ if (currentUnsubscribe) currentUnsubscribe();
1720
+ subscriptionHasData.delete(op.key);
1721
+ results.complete();
1722
+ }));
1723
+ const resubStream$ = require_make.pipe(resubscribe$.source, require_make.filter((key) => key === op.key), require_make.mergeMap(() => {
1724
+ doSubscribe();
1725
+ initialized = true;
1068
1726
  return empty();
1069
1727
  }));
1728
+ const stream$ = require_make.pipe(require_make.merge(data === null ? fetchPolicy === "cache-only" ? require_make.fromValue({
1729
+ operation: op,
1730
+ data: null,
1731
+ errors: []
1732
+ }) : empty() : require_make.fromValue({
1733
+ operation: op,
1734
+ data,
1735
+ ...stale && { metadata: { cache: { stale: true } } },
1736
+ errors: []
1737
+ }), results.source, resubStream$), require_make.takeUntil(teardown$));
1738
+ if (stale) refetch$.next(op);
1739
+ return stream$;
1070
1740
  }), require_make.filter(() => fetchPolicy === "cache-only" || fetchPolicy === "cache-and-network" || fetchPolicy === "cache-first")), require_make.pipe(require_make.merge(nonCache$, require_make.pipe(query$, require_make.filter((op) => {
1071
1741
  const { data } = cache.readQuery(op.artifact, op.variables);
1072
1742
  return fetchPolicy === "cache-and-network" || data === null;
@@ -1074,8 +1744,22 @@ const cacheExchange = (options = {}) => {
1074
1744
  if (result.operation.variant === "request" && result.operation.artifact.kind === "mutation" && result.operation.metadata?.cache?.optimisticResponse) cache.removeOptimistic(result.operation.key);
1075
1745
  if (result.operation.variant === "request" && result.data) cache.writeQuery(result.operation.artifact, result.operation.variables, result.data);
1076
1746
  if (result.operation.variant !== "request" || result.operation.artifact.kind !== "query" || fetchPolicy === "network-only" || !!(result.errors && result.errors.length > 0)) return require_make.fromValue(result);
1747
+ if (subscriptionHasData.get(result.operation.key)) {
1748
+ const { data } = cache.readQuery(result.operation.artifact, result.operation.variables);
1749
+ if (data !== null) return empty();
1750
+ return require_make.fromValue({
1751
+ operation: result.operation,
1752
+ data: void 0,
1753
+ errors: [new ExchangeError("Cache failed to denormalize the network response. This is likely a bug in the cache normalizer.", { exchangeName: "cache" })]
1754
+ });
1755
+ }
1756
+ subscriptionHasData.set(result.operation.key, true);
1757
+ resubscribe$.next(result.operation.key);
1077
1758
  const { data } = cache.readQuery(result.operation.artifact, result.operation.variables);
1078
- if (data !== null) return empty();
1759
+ if (data !== null) return require_make.fromValue({
1760
+ ...result,
1761
+ data
1762
+ });
1079
1763
  return require_make.fromValue({
1080
1764
  operation: result.operation,
1081
1765
  data: void 0,
@@ -1087,6 +1771,99 @@ const cacheExchange = (options = {}) => {
1087
1771
  };
1088
1772
  };
1089
1773
 
1774
+ //#endregion
1775
+ //#region src/cache/patch.ts
1776
/**
 * Creates a shallow copy of a single node, preserving arrayness.
 * @internal
 */
const copyNode = (node) => {
	if (Array.isArray(node)) return [...node];
	return { ...node };
};
/**
 * Shallow-copies `root` and every intermediate node along `path`, up to
 * (but not including) the node addressed by the final segment. Nodes off
 * the path keep their original references, so callers can safely assign
 * into the copy at `path` without touching the source tree.
 * @internal
 * @param root - The value to copy from.
 * @param path - The path whose ancestors should be freshly copied.
 * @returns A new root, or `root` itself when `path` is empty.
 */
const shallowCopyPath = (root, path) => {
	if (path.length === 0) return root;
	const top = copyNode(root);
	let cursor = top;
	const lastIndex = path.length - 1;
	for (let i = 0; i < lastIndex; i++) {
		const segment = path[i];
		const fresh = copyNode(cursor[segment]);
		cursor[segment] = fresh;
		cursor = fresh;
	}
	return top;
};
1788
/**
 * Writes `value` into `obj` at the location described by `path`,
 * mutating `obj` in place. Intermediate nodes must already exist.
 * @param obj - The object to modify.
 * @param path - The path to the target location.
 * @param value - The value to set.
 */
const setPath = (obj, path, value) => {
	const lastIndex = path.length - 1;
	let node = obj;
	for (let i = 0; i < lastIndex; i++) {
		node = node[path[i]];
	}
	node[path[lastIndex]] = value;
};
1799
/**
 * Reads the value located at `path` inside `obj`, stopping with
 * `undefined` as soon as a `null`/`undefined` node is encountered.
 * @param obj - The object to read from.
 * @param path - The path to the target location.
 * @returns The value at the path, or the object itself if path is empty.
 */
const getPath = (obj, path) => {
	return path.reduce(
		(node, segment) => (node === null || node === void 0 ? void 0 : node[segment]),
		obj,
	);
};
1813
/**
 * Applies cache patches to data immutably, shallow-copying only along
 * changed paths so unchanged subtrees keep their referential identity.
 * @param data - The source value; never mutated.
 * @param patches - Patches of type `set`, `splice`, or `swap`.
 * @returns A new value with all patches applied, or `data` itself when
 * `patches` is empty.
 */
const applyPatchesImmutable = (data, patches) => {
	if (patches.length === 0) return data;
	// Shallow copy of one node, preserving arrayness.
	const cloneNode = (node) => (Array.isArray(node) ? [...node] : { ...node });
	// Copies `root` plus every node along `path` except the final segment.
	// Returns the fresh root and the (already copied) parent of the last key,
	// so the caller can assign into the parent without touching the source.
	const copyToParent = (root, path) => {
		const top = cloneNode(root);
		let parent = top;
		for (let i = 0; i < path.length - 1; i++) {
			parent[path[i]] = cloneNode(parent[path[i]]);
			parent = parent[path[i]];
		}
		return [top, parent];
	};
	let result = data;
	for (const patch of patches) {
		if (patch.type === "set") {
			if (patch.path.length === 0) {
				// Root replacement: no copying needed.
				result = patch.value;
				continue;
			}
			const [top, parent] = copyToParent(result, patch.path);
			parent[patch.path.at(-1)] = patch.value;
			result = top;
		} else if (patch.type === "splice" || patch.type === "swap") {
			// Mutates a fresh copy of the target array in place.
			const mutate = (arr) => {
				if (patch.type === "splice") arr.splice(patch.index, patch.deleteCount, ...patch.items);
				else [arr[patch.i], arr[patch.j]] = [arr[patch.j], arr[patch.i]];
			};
			if (patch.path.length === 0) {
				// BUG FIX: a root-level array patch previously wrote the new array
				// to `result[undefined]` and mutated the caller's data (the
				// empty-path shallow copy returned the original reference while
				// `path.at(-1)` was undefined). Copy-and-mutate the root instead,
				// matching what applyPatchesMutable does for an empty path.
				const arr = [...result];
				mutate(arr);
				result = arr;
			} else {
				const [top, parent] = copyToParent(result, patch.path);
				const arr = [...parent[patch.path.at(-1)]];
				mutate(arr);
				parent[patch.path.at(-1)] = arr;
				result = top;
			}
		}
	}
	return result;
};
1849
/**
 * Applies cache patches to a mutable target object in place.
 * @param target - The mutable object to apply patches to.
 * @param patches - The patches to apply.
 * @returns The new root value if a root-level set patch was applied, otherwise undefined.
 */
const applyPatchesMutable = (target, patches) => {
	let root;
	for (const patch of patches) {
		switch (patch.type) {
			case "set": {
				if (patch.path.length === 0) {
					// The root itself cannot be replaced in place; hand it back to the caller.
					root = patch.value;
				} else {
					setPath(target, patch.path, patch.value);
				}
				break;
			}
			case "splice": {
				const list = getPath(target, patch.path);
				list.splice(patch.index, patch.deleteCount, ...patch.items);
				break;
			}
			case "swap": {
				const list = getPath(target, patch.path);
				const held = list[patch.i];
				list[patch.i] = list[patch.j];
				list[patch.j] = held;
				break;
			}
		}
	}
	return root;
};
1866
+
1090
1867
  //#endregion
1091
1868
  //#region src/exchanges/retry.ts
1092
1869
  const defaultShouldRetry = (error) => isExchangeError(error, "http") && error.extensions?.statusCode !== void 0 && error.extensions.statusCode >= 500;
@@ -1563,15 +2340,19 @@ exports.Client = Client;
1563
2340
  exports.ExchangeError = ExchangeError;
1564
2341
  exports.GraphQLError = GraphQLError;
1565
2342
  exports.RequiredFieldError = RequiredFieldError;
2343
+ exports.applyPatchesImmutable = applyPatchesImmutable;
2344
+ exports.applyPatchesMutable = applyPatchesMutable;
1566
2345
  exports.cacheExchange = cacheExchange;
1567
2346
  exports.createClient = createClient;
1568
2347
  exports.dedupExchange = dedupExchange;
1569
2348
  exports.fragmentExchange = fragmentExchange;
2349
+ exports.getPath = getPath;
1570
2350
  exports.httpExchange = httpExchange;
1571
2351
  exports.isAggregatedError = isAggregatedError;
1572
2352
  exports.isExchangeError = isExchangeError;
1573
2353
  exports.isGraphQLError = isGraphQLError;
1574
2354
  exports.requiredExchange = requiredExchange;
1575
2355
  exports.retryExchange = retryExchange;
2356
+ exports.setPath = setPath;
1576
2357
  exports.stringify = stringify;
1577
2358
  exports.subscriptionExchange = subscriptionExchange;