@mearie/core 0.5.1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,4 +1,4 @@
1
- import { C as mergeMap, S as filter, a as finalize, b as merge, c as share, d as map, i as fromValue, l as takeUntil, m as collect, n as fromSubscription, o as initialize, r as makeSubject, s as switchMap, t as make, u as take, v as fromArray, w as pipe, x as fromPromise, y as tap } from "./make-C7I1YIXm.mjs";
1
+ import { C as mergeMap, S as filter, a as finalize, b as merge, c as share, d as map, i as fromValue, l as takeUntil, m as collect, o as initialize, r as makeSubject, t as make, u as take, v as fromArray, w as pipe, x as fromPromise, y as tap } from "./make-C7I1YIXm.mjs";
2
2
 
3
3
  //#region src/errors.ts
4
4
  /**
@@ -359,15 +359,6 @@ const makeFieldKey = (selection, variables) => {
359
359
  return `${selection.name}@${args}`;
360
360
  };
361
361
  /**
362
- * Generates a unique key for tracking memoized denormalized results for structural sharing.
363
- * @internal
364
- * @param kind - The operation kind ('query', 'fragment', 'fragments').
365
- * @param name - The artifact name.
366
- * @param id - Serialized identifier (variables, entity key, etc.).
367
- * @returns A unique memo key.
368
- */
369
- const makeMemoKey = (kind, name, id) => `${kind}:${name}:${id}`;
370
- /**
371
362
  * Gets a unique key for tracking a field dependency.
372
363
  * @internal
373
364
  * @param storageKey Storage key (entity or root query key).
@@ -444,65 +435,48 @@ const isEqual = (a, b) => {
444
435
  }
445
436
  return false;
446
437
  };
438
+ const NormalizedKey = Symbol("mearie.normalized");
447
439
  /**
448
- * Recursively replaces a new value tree with the previous one wherever structurally equal,
449
- * preserving referential identity for unchanged subtrees.
450
- *
451
- * Returns `prev` (same reference) when the entire subtree is structurally equal.
440
+ * Marks a record as a normalized cache object so that {@link mergeFields}
441
+ * can distinguish it from opaque scalar values (e.g. JSON scalars).
442
+ * Only normalized records are deep-merged; unmarked objects are treated as
443
+ * atomic values and replaced entirely on write.
452
444
  * @internal
453
445
  */
454
- const replaceEqualDeep = (prev, next) => {
455
- if (prev === next) return prev;
456
- if (typeof prev !== typeof next || prev === null || next === null || typeof prev !== "object") return next;
457
- if (Array.isArray(prev)) {
458
- if (!Array.isArray(next)) return next;
459
- let allSame = prev.length === next.length;
460
- const result = [];
461
- for (const [i, item] of next.entries()) {
462
- const shared = i < prev.length ? replaceEqualDeep(prev[i], item) : item;
463
- result.push(shared);
464
- if (shared !== prev[i]) allSame = false;
465
- }
466
- return allSame ? prev : result;
467
- }
468
- if (Array.isArray(next)) return next;
469
- const prevObj = prev;
470
- const nextObj = next;
471
- const nextKeys = Object.keys(nextObj);
472
- const prevKeys = Object.keys(prevObj);
473
- let allSame = nextKeys.length === prevKeys.length;
474
- const result = {};
475
- for (const key of nextKeys) if (key in prevObj) {
476
- result[key] = replaceEqualDeep(prevObj[key], nextObj[key]);
477
- if (result[key] !== prevObj[key]) allSame = false;
478
- } else {
479
- result[key] = nextObj[key];
480
- allSame = false;
481
- }
482
- return allSame ? prev : result;
446
+ const markNormalized = (obj) => {
447
+ Object.defineProperty(obj, NormalizedKey, { value: true });
448
+ };
449
+ const isNormalizedRecord = (value) => {
450
+ return typeof value === "object" && value !== null && NormalizedKey in value;
483
451
  };
484
452
  /**
485
- * Deeply merges two values. Objects are recursively merged, arrays are element-wise merged,
486
- * entity links and primitives use last-write-wins.
453
+ * Deeply merges two values. When {@link deep} is false (default), only
454
+ * {@link markNormalized normalized} cache objects are recursively merged;
455
+ * unmarked plain objects (e.g. JSON scalars) are atomically replaced.
456
+ * When {@link deep} is true, all objects are recursively merged unconditionally.
457
+ * Arrays are element-wise merged, entity links and primitives use last-write-wins.
487
458
  * @internal
488
459
  */
489
- const mergeFieldValue = (existing, incoming) => {
460
+ const mergeFieldValue = (existing, incoming, deep) => {
490
461
  if (isNullish(existing) || isNullish(incoming)) return incoming;
491
462
  if (typeof existing !== "object" || typeof incoming !== "object") return incoming;
492
463
  if (isEntityLink(existing) || isEntityLink(incoming)) return incoming;
493
- if (Array.isArray(existing) && Array.isArray(incoming)) return incoming.map((item, i) => i < existing.length ? mergeFieldValue(existing[i], item) : item);
464
+ if (Array.isArray(existing) && Array.isArray(incoming)) return incoming.map((item, i) => i < existing.length ? mergeFieldValue(existing[i], item, deep) : item);
494
465
  if (Array.isArray(existing) || Array.isArray(incoming)) return incoming;
495
- mergeFields(existing, incoming);
466
+ if (!deep && !isNormalizedRecord(incoming)) return incoming;
467
+ mergeFields(existing, incoming, deep);
496
468
  return existing;
497
469
  };
498
470
  /**
499
- * Deeply merges source fields into target. Objects are recursively merged,
500
- * arrays are element-wise merged, entity links and primitives use last-write-wins.
471
+ * Deeply merges source fields into target.
472
+ * When {@link deep} is false (default), only {@link markNormalized normalized}
473
+ * objects are recursively merged; unmarked objects are atomically replaced.
474
+ * When {@link deep} is true, all objects are recursively merged unconditionally.
501
475
  * @internal
502
476
  */
503
- const mergeFields = (target, source) => {
477
+ const mergeFields = (target, source, deep) => {
504
478
  if (isNullish(source) || typeof source !== "object" || Array.isArray(source)) return;
505
- for (const key of Object.keys(source)) target[key] = mergeFieldValue(target[key], source[key]);
479
+ for (const key of Object.keys(source)) target[key] = mergeFieldValue(target[key], source[key], deep ?? false);
506
480
  };
507
481
  /**
508
482
  * Creates a FieldKey from a raw field name and optional arguments.
@@ -514,6 +488,48 @@ const mergeFields = (target, source) => {
514
488
  const makeFieldKeyFromArgs = (field, args) => {
515
489
  return `${field}@${args && Object.keys(args).length > 0 ? stringify(args) : "{}"}`;
516
490
  };
491
+ /**
492
+ * Type guard to check if a value is an array containing entity links.
493
+ * @internal
494
+ * @param value - Value to check.
495
+ * @returns True if the value is an array containing at least one entity link.
496
+ */
497
+ const isEntityLinkArray = (value) => {
498
+ if (!Array.isArray(value) || value.length === 0) return false;
499
+ for (const item of value) {
500
+ if (item === null || item === void 0) continue;
501
+ if (typeof item === "object" && !Array.isArray(item) && EntityLinkKey in item) return true;
502
+ if (Array.isArray(item) && isEntityLinkArray(item)) return true;
503
+ return false;
504
+ }
505
+ return false;
506
+ };
507
+ /**
508
+ * Compares two entity link arrays by their entity keys.
509
+ * @internal
510
+ * @param a - First entity link array.
511
+ * @param b - Second entity link array.
512
+ * @returns True if both arrays have the same entity keys at each position.
513
+ */
514
+ const isEntityLinkArrayEqual = (a, b) => {
515
+ if (a.length !== b.length) return false;
516
+ for (const [i, element] of a.entries()) if ((element?.[EntityLinkKey] ?? null) !== (b[i]?.[EntityLinkKey] ?? null)) return false;
517
+ return true;
518
+ };
519
+ /**
520
+ * Parses a dependency key into its storage key and field key components.
521
+ * @internal
522
+ * @param depKey - The dependency key to parse.
523
+ * @returns The storage key and field key.
524
+ */
525
+ const parseDependencyKey = (depKey) => {
526
+ const atIdx = depKey.indexOf("@");
527
+ const dotIdx = depKey.lastIndexOf(".", atIdx);
528
+ return {
529
+ storageKey: depKey.slice(0, dotIdx),
530
+ fieldKey: depKey.slice(dotIdx + 1)
531
+ };
532
+ };
517
533
 
518
534
  //#endregion
519
535
  //#region src/cache/normalize.ts
@@ -530,29 +546,31 @@ const normalize = (schemaMeta, selections, storage, data, variables, accessor) =
530
546
  if (keys.every((k) => k !== void 0 && k !== null)) return makeEntityKey(typename, keys);
531
547
  return null;
532
548
  };
533
- const normalizeField = (storageKey, selections, value) => {
549
+ const normalizeField = (storageKey, selections, value, parentType) => {
534
550
  if (isNullish(value)) return value;
535
- if (Array.isArray(value)) return value.map((item) => normalizeField(storageKey, selections, item));
551
+ if (Array.isArray(value)) return value.map((item) => normalizeField(storageKey, selections, item, parentType));
536
552
  const data = value;
537
- const typename = resolveTypename(selections, data);
553
+ const typename = resolveTypename(selections, data) ?? (parentType && schemaMeta.entities[parentType] ? parentType : void 0);
538
554
  const entityKey = resolveEntityKey(typename, data);
539
555
  if (entityKey) storageKey = entityKey;
540
556
  const fields = {};
541
557
  for (const selection of selections) if (selection.kind === "Field") {
542
558
  const fieldKey = makeFieldKey(selection, variables);
543
- const fieldValue = data[selection.alias ?? selection.name];
559
+ let fieldValue = data[selection.alias ?? selection.name];
560
+ if (selection.name === "__typename" && fieldValue === void 0 && typename) fieldValue = typename;
544
561
  if (storageKey !== null && selection.selections && typeof fieldValue === "object" && fieldValue !== null && !Array.isArray(fieldValue)) {
545
- const fieldTypename = resolveTypename(selection.selections, fieldValue);
562
+ const fieldTypename = resolveTypename(selection.selections, fieldValue) ?? (selection.type && schemaMeta.entities[selection.type] ? selection.type : void 0);
546
563
  if (fieldTypename && schemaMeta.entities[fieldTypename] && !resolveEntityKey(fieldTypename, fieldValue) && isEntityLink(storage[storageKey]?.[fieldKey])) continue;
547
564
  }
548
565
  const oldValue = storageKey === null ? void 0 : storage[storageKey]?.[fieldKey];
549
566
  if (storageKey !== null && (!selection.selections || isNullish(oldValue) || isNullish(fieldValue))) accessor?.(storageKey, fieldKey, oldValue, fieldValue);
550
- fields[fieldKey] = selection.selections ? normalizeField(null, selection.selections, fieldValue) : fieldValue;
567
+ fields[fieldKey] = selection.selections ? normalizeField(null, selection.selections, fieldValue, selection.type) : fieldValue;
551
568
  if (storageKey !== null && selection.selections && !isNullish(oldValue) && !isNullish(fieldValue) && !isEntityLink(fields[fieldKey]) && !isEqual(oldValue, fields[fieldKey])) accessor?.(storageKey, fieldKey, oldValue, fields[fieldKey]);
552
569
  } else if (selection.kind === "FragmentSpread" || selection.kind === "InlineFragment" && selection.on === typename) {
553
570
  const inner = normalizeField(storageKey, selection.selections, value);
554
571
  if (!isEntityLink(inner)) mergeFields(fields, inner);
555
572
  }
573
+ markNormalized(fields);
556
574
  if (entityKey) {
557
575
  const existing = storage[entityKey];
558
576
  if (existing) mergeFields(existing, fields);
@@ -575,59 +593,494 @@ const typenameFieldKey = makeFieldKey({
575
593
  name: "__typename",
576
594
  type: "String"
577
595
  }, {});
578
- const denormalize = (selections, storage, value, variables, accessor) => {
596
+ const denormalize = (selections, storage, value, variables, accessor, options) => {
579
597
  let partial = false;
580
- const denormalizeField = (storageKey, selections, value) => {
598
+ const denormalizeField = (storageKey, selections, value, path) => {
581
599
  if (isNullish(value)) return value;
582
- if (Array.isArray(value)) return value.map((item) => denormalizeField(storageKey, selections, item));
600
+ if (Array.isArray(value)) return value.map((item, i) => denormalizeField(storageKey, selections, item, [...path, i]));
583
601
  const data = value;
584
602
  if (isEntityLink(data)) {
585
603
  const entityKey = data[EntityLinkKey];
586
604
  const entity = storage[entityKey];
587
605
  if (!entity) {
588
- accessor?.(entityKey, typenameFieldKey);
606
+ accessor?.(entityKey, typenameFieldKey, path);
589
607
  partial = true;
590
608
  return null;
591
609
  }
592
- return denormalizeField(entityKey, selections, entity);
610
+ return denormalizeField(entityKey, selections, entity, path);
593
611
  }
594
612
  const fields = {};
595
613
  for (const selection of selections) if (selection.kind === "Field") {
596
614
  const fieldKey = makeFieldKey(selection, variables);
597
615
  const fieldValue = data[fieldKey];
598
- if (storageKey !== null) accessor?.(storageKey, fieldKey);
616
+ const fieldPath = [...path, selection.alias ?? selection.name];
617
+ if (storageKey !== null) accessor?.(storageKey, fieldKey, fieldPath, selection.selections);
599
618
  if (fieldValue === void 0) {
600
619
  partial = true;
601
620
  continue;
602
621
  }
603
622
  const name = selection.alias ?? selection.name;
604
- const value = selection.selections ? denormalizeField(null, selection.selections, fieldValue) : fieldValue;
605
- if (name in fields) mergeFields(fields, { [name]: value });
606
- else fields[name] = value;
623
+ const resolvedValue = selection.selections ? denormalizeField(null, selection.selections, fieldValue, fieldPath) : fieldValue;
624
+ if (name in fields) mergeFields(fields, { [name]: resolvedValue }, true);
625
+ else fields[name] = resolvedValue;
607
626
  } else if (selection.kind === "FragmentSpread") if (storageKey !== null && storageKey !== RootFieldKey) {
608
627
  fields[FragmentRefKey] = storageKey;
609
- if (selection.args) {
610
- const resolvedArgs = resolveArguments(selection.args, variables);
611
- const mergedVars = {
612
- ...variables,
613
- ...resolvedArgs
614
- };
615
- fields[FragmentVarsKey] = {
616
- ...fields[FragmentVarsKey],
617
- [selection.name]: mergedVars
618
- };
628
+ const merged = selection.args ? {
629
+ ...variables,
630
+ ...resolveArguments(selection.args, variables)
631
+ } : { ...variables };
632
+ fields[FragmentVarsKey] = {
633
+ ...fields[FragmentVarsKey],
634
+ [selection.name]: merged
635
+ };
636
+ if (accessor) {
637
+ if (denormalize(selection.selections, storage, { [EntityLinkKey]: storageKey }, variables, options?.trackFragmentDeps === false ? void 0 : accessor, options).partial) partial = true;
638
+ }
639
+ } else if (storageKey === RootFieldKey) {
640
+ fields[FragmentRefKey] = RootFieldKey;
641
+ const merged = selection.args ? {
642
+ ...variables,
643
+ ...resolveArguments(selection.args, variables)
644
+ } : { ...variables };
645
+ fields[FragmentVarsKey] = {
646
+ ...fields[FragmentVarsKey],
647
+ [selection.name]: merged
648
+ };
649
+ if (accessor) {
650
+ if (denormalize(selection.selections, storage, storage[RootFieldKey], variables, options?.trackFragmentDeps === false ? void 0 : accessor, options).partial) partial = true;
619
651
  }
620
- if (accessor) denormalize(selection.selections, storage, { [EntityLinkKey]: storageKey }, variables, accessor);
621
- } else mergeFields(fields, denormalizeField(storageKey, selection.selections, value));
622
- else if (selection.kind === "InlineFragment" && selection.on === data[typenameFieldKey]) mergeFields(fields, denormalizeField(storageKey, selection.selections, value));
652
+ } else mergeFields(fields, denormalizeField(storageKey, selection.selections, value, path), true);
653
+ else if (selection.kind === "InlineFragment" && selection.on === data[typenameFieldKey]) mergeFields(fields, denormalizeField(storageKey, selection.selections, value, path), true);
623
654
  return fields;
624
655
  };
625
656
  return {
626
- data: denormalizeField(RootFieldKey, selections, value),
657
+ data: denormalizeField(RootFieldKey, selections, value, []),
627
658
  partial
628
659
  };
629
660
  };
630
661
 
662
+ //#endregion
663
+ //#region src/cache/tree.ts
664
+ /**
665
+ * @internal
666
+ */
667
+ const buildEntryTree = (tuples, rootDepKey) => {
668
+ const root = {
669
+ depKey: rootDepKey ?? "__root",
670
+ children: /* @__PURE__ */ new Map()
671
+ };
672
+ for (const { storageKey, fieldKey, path, selections } of tuples) {
673
+ let current = root;
674
+ for (const element of path) {
675
+ const key = String(element);
676
+ let child = current.children.get(key);
677
+ if (!child) {
678
+ child = {
679
+ depKey: "",
680
+ children: /* @__PURE__ */ new Map()
681
+ };
682
+ current.children.set(key, child);
683
+ }
684
+ current = child;
685
+ }
686
+ current.depKey = makeDependencyKey(storageKey, fieldKey);
687
+ if (selections) current.selections = selections;
688
+ }
689
+ return root;
690
+ };
691
+ /**
692
+ * @internal
693
+ */
694
+ const findEntryTreeNode = (root, path) => {
695
+ let current = root;
696
+ for (const segment of path) {
697
+ if (!current) return void 0;
698
+ current = current.children.get(String(segment));
699
+ }
700
+ return current;
701
+ };
702
+ /**
703
+ * Removes all subscription entries for a given subscription from the subtree rooted at {@link node},
704
+ * and clears the node's children map. Both the subscription entries and the tree structure
705
+ * are cleaned up atomically to avoid stale references.
706
+ * @internal
707
+ */
708
+ const removeSubtreeEntries = (node, subscription, subscriptions) => {
709
+ const entries = subscriptions.get(node.depKey);
710
+ if (entries) {
711
+ for (const entry of entries) if (entry.subscription === subscription) {
712
+ entries.delete(entry);
713
+ break;
714
+ }
715
+ if (entries.size === 0) subscriptions.delete(node.depKey);
716
+ }
717
+ for (const child of node.children.values()) removeSubtreeEntries(child, subscription, subscriptions);
718
+ node.children.clear();
719
+ };
720
+ /**
721
+ * @internal
722
+ */
723
+ const snapshotFields = (node, storage) => {
724
+ const result = /* @__PURE__ */ new Map();
725
+ for (const [fieldName, child] of node.children) {
726
+ const { storageKey, fieldKey } = parseDependencyKey(child.depKey);
727
+ const fields = storage[storageKey];
728
+ if (fields) result.set(fieldName, fields[fieldKey]);
729
+ }
730
+ return result;
731
+ };
732
+ /**
733
+ * @internal
734
+ */
735
+ const partialDenormalize = (node, entity, basePath, rebuiltDepKeys, storage, subscriptions, subscription) => {
736
+ if (!node.selections) return {
737
+ data: null,
738
+ fieldValues: /* @__PURE__ */ new Map()
739
+ };
740
+ const tuples = [];
741
+ const { data } = denormalize(node.selections, storage, entity, subscription.variables, (storageKey, fieldKey, path, sels) => {
742
+ tuples.push({
743
+ storageKey,
744
+ fieldKey,
745
+ path: [...basePath, ...path],
746
+ selections: sels
747
+ });
748
+ }, { trackFragmentDeps: false });
749
+ node.children.clear();
750
+ const fieldValues = /* @__PURE__ */ new Map();
751
+ for (const tuple of tuples) {
752
+ const depKey = makeDependencyKey(tuple.storageKey, tuple.fieldKey);
753
+ rebuiltDepKeys.add(depKey);
754
+ const relativePath = tuple.path.slice(basePath.length);
755
+ let current = node;
756
+ for (const element of relativePath) {
757
+ const key = String(element);
758
+ let child = current.children.get(key);
759
+ if (!child) {
760
+ child = {
761
+ depKey: "",
762
+ children: /* @__PURE__ */ new Map()
763
+ };
764
+ current.children.set(key, child);
765
+ }
766
+ current = child;
767
+ }
768
+ current.depKey = depKey;
769
+ if (tuple.selections) current.selections = tuple.selections;
770
+ const entry = {
771
+ path: tuple.path,
772
+ subscription
773
+ };
774
+ let entrySet = subscriptions.get(depKey);
775
+ if (!entrySet) {
776
+ entrySet = /* @__PURE__ */ new Set();
777
+ subscriptions.set(depKey, entrySet);
778
+ }
779
+ entrySet.add(entry);
780
+ if (relativePath.length === 1) {
781
+ const fieldName = String(relativePath[0]);
782
+ if (data && typeof data === "object") fieldValues.set(fieldName, data[fieldName]);
783
+ }
784
+ }
785
+ return {
786
+ data,
787
+ fieldValues
788
+ };
789
+ };
790
+ const updateSubtreePaths = (node, basePath, newIndex, baseLen, subscription, subscriptions) => {
791
+ const entries = subscriptions.get(node.depKey);
792
+ if (entries) {
793
+ for (const entry of entries) if (entry.subscription === subscription && entry.path.length > baseLen) entry.path = [
794
+ ...basePath,
795
+ newIndex,
796
+ ...entry.path.slice(baseLen + 1)
797
+ ];
798
+ }
799
+ for (const child of node.children.values()) updateSubtreePaths(child, basePath, newIndex, baseLen, subscription, subscriptions);
800
+ };
801
+ /**
802
+ * @internal
803
+ */
804
+ const rebuildArrayIndices = (node, entry, subscriptions) => {
805
+ const basePath = entry.path;
806
+ const baseLen = basePath.length;
807
+ const children = [...node.children.entries()].toSorted(([a], [b]) => Number(a) - Number(b));
808
+ node.children.clear();
809
+ for (const [newIdx, child_] of children.entries()) {
810
+ const [, child] = child_;
811
+ const newKey = String(newIdx);
812
+ node.children.set(newKey, child);
813
+ updateSubtreePaths(child, basePath, newIdx, baseLen, entry.subscription, subscriptions);
814
+ }
815
+ };
816
+
817
+ //#endregion
818
+ //#region src/cache/diff.ts
819
+ /**
820
+ * Finds the common prefix and suffix boundaries between two key arrays.
821
+ * @internal
822
+ */
823
+ const findCommonBounds = (oldKeys, newKeys) => {
824
+ let start = 0;
825
+ while (start < oldKeys.length && start < newKeys.length && oldKeys[start] === newKeys[start]) start++;
826
+ let oldEnd = oldKeys.length;
827
+ let newEnd = newKeys.length;
828
+ while (oldEnd > start && newEnd > start && oldKeys[oldEnd - 1] === newKeys[newEnd - 1]) {
829
+ oldEnd--;
830
+ newEnd--;
831
+ }
832
+ return {
833
+ start,
834
+ oldEnd,
835
+ newEnd
836
+ };
837
+ };
838
+ /**
839
+ * Computes swap operations to reorder oldKeys into newKeys order using selection sort.
840
+ * @internal
841
+ */
842
+ const computeSwaps = (oldKeys, newKeys) => {
843
+ const working = [...oldKeys];
844
+ const swaps = [];
845
+ for (const [i, newKey] of newKeys.entries()) {
846
+ if (working[i] === newKey) continue;
847
+ const j = working.indexOf(newKey, i + 1);
848
+ if (j === -1) continue;
849
+ [working[i], working[j]] = [working[j], working[i]];
850
+ swaps.push({
851
+ i,
852
+ j
853
+ });
854
+ }
855
+ return swaps;
856
+ };
857
+
858
+ //#endregion
859
+ //#region src/cache/change.ts
860
+ /**
861
+ * @internal
862
+ */
863
+ const classifyChanges = (changedKeys) => {
864
+ const structural = [];
865
+ const scalar = [];
866
+ for (const [depKey, { oldValue, newValue }] of changedKeys) {
867
+ if (isEntityLink(oldValue) && isEntityLink(newValue) && oldValue[EntityLinkKey] === newValue[EntityLinkKey]) continue;
868
+ if (isEntityLinkArray(oldValue) && isEntityLinkArray(newValue) && isEntityLinkArrayEqual(oldValue, newValue)) continue;
869
+ if (isEntityLink(oldValue) || isEntityLink(newValue) || isEntityLinkArray(oldValue) || isEntityLinkArray(newValue)) structural.push({
870
+ depKey,
871
+ oldValue,
872
+ newValue
873
+ });
874
+ else scalar.push({
875
+ depKey,
876
+ newValue
877
+ });
878
+ }
879
+ return {
880
+ structural,
881
+ scalar
882
+ };
883
+ };
884
+ /**
885
+ * @internal
886
+ */
887
+ const processStructuralChange = (entry, node, oldValue, newValue, rebuiltDepKeys, storage, subscriptions) => {
888
+ const patches = [];
889
+ if (isEntityLink(oldValue) || isEntityLink(newValue)) {
890
+ if (isNullish(newValue)) {
891
+ removeSubtreeEntries(node, entry.subscription, subscriptions);
892
+ patches.push({
893
+ type: "set",
894
+ path: entry.path,
895
+ value: null
896
+ });
897
+ return patches;
898
+ }
899
+ if (isNullish(oldValue)) {
900
+ const entity = storage[newValue[EntityLinkKey]];
901
+ if (entity) {
902
+ const { data } = partialDenormalize(node, entity, entry.path, rebuiltDepKeys, storage, subscriptions, entry.subscription);
903
+ patches.push({
904
+ type: "set",
905
+ path: entry.path,
906
+ value: data
907
+ });
908
+ } else patches.push({
909
+ type: "set",
910
+ path: entry.path,
911
+ value: null
912
+ });
913
+ return patches;
914
+ }
915
+ const oldFields = snapshotFields(node, storage);
916
+ removeSubtreeEntries(node, entry.subscription, subscriptions);
917
+ const newEntity = storage[newValue[EntityLinkKey]];
918
+ if (!newEntity) {
919
+ patches.push({
920
+ type: "set",
921
+ path: entry.path,
922
+ value: null
923
+ });
924
+ return patches;
925
+ }
926
+ const { fieldValues: newFields } = partialDenormalize(node, newEntity, entry.path, rebuiltDepKeys, storage, subscriptions, entry.subscription);
927
+ for (const [fieldName, newVal] of newFields) if (!isEqual(oldFields.get(fieldName), newVal)) patches.push({
928
+ type: "set",
929
+ path: [...entry.path, fieldName],
930
+ value: newVal
931
+ });
932
+ for (const [fieldName] of oldFields) if (!newFields.has(fieldName)) patches.push({
933
+ type: "set",
934
+ path: [...entry.path, fieldName],
935
+ value: null
936
+ });
937
+ return patches;
938
+ }
939
+ if (isEntityLinkArray(oldValue) || isEntityLinkArray(newValue)) {
940
+ const oldArr = Array.isArray(oldValue) ? oldValue : [];
941
+ const newArr = Array.isArray(newValue) ? newValue : [];
942
+ const oldKeys = oldArr.map((item) => item !== null && item !== void 0 && typeof item === "object" && EntityLinkKey in item ? item[EntityLinkKey] : null);
943
+ const newKeys = newArr.map((item) => item !== null && item !== void 0 && typeof item === "object" && EntityLinkKey in item ? item[EntityLinkKey] : null);
944
+ const { start, oldEnd, newEnd } = findCommonBounds(oldKeys, newKeys);
945
+ const oldMiddle = oldKeys.slice(start, oldEnd);
946
+ const newMiddle = newKeys.slice(start, newEnd);
947
+ const newMiddleSet = new Set(newMiddle.filter((k) => k !== null));
948
+ const oldMiddleSet = new Set(oldMiddle.filter((k) => k !== null));
949
+ const removedIndices = [];
950
+ for (let i = oldMiddle.length - 1; i >= 0; i--) {
951
+ const key = oldMiddle[i];
952
+ if (key !== null && !newMiddleSet.has(key)) removedIndices.push(start + i);
953
+ }
954
+ for (const idx of removedIndices) {
955
+ const childKey = String(idx);
956
+ const child = node.children.get(childKey);
957
+ if (child) {
958
+ removeSubtreeEntries(child, entry.subscription, subscriptions);
959
+ node.children.delete(childKey);
960
+ }
961
+ patches.push({
962
+ type: "splice",
963
+ path: entry.path,
964
+ index: idx,
965
+ deleteCount: 1,
966
+ items: []
967
+ });
968
+ }
969
+ compactChildren(node);
970
+ const retainedOld = oldMiddle.filter((k) => k !== null && newMiddleSet.has(k));
971
+ const retainedNew = newMiddle.filter((k) => k !== null && oldMiddleSet.has(k));
972
+ if (retainedOld.length > 0) {
973
+ const swaps = computeSwaps(retainedOld, retainedNew);
974
+ for (const { i, j } of swaps) {
975
+ const absI = start + i;
976
+ const absJ = start + j;
977
+ patches.push({
978
+ type: "swap",
979
+ path: entry.path,
980
+ i: absI,
981
+ j: absJ
982
+ });
983
+ const childI = node.children.get(String(absI));
984
+ const childJ = node.children.get(String(absJ));
985
+ if (childI && childJ) {
986
+ node.children.set(String(absI), childJ);
987
+ node.children.set(String(absJ), childI);
988
+ }
989
+ }
990
+ }
991
+ const siblingSelections = findSiblingSelections(node);
992
+ const addedKeys = newMiddle.filter((k) => k !== null && !oldMiddleSet.has(k));
993
+ for (const key of addedKeys) {
994
+ const idx = start + newMiddle.indexOf(key);
995
+ shiftChildrenRight(node, idx);
996
+ const entity = storage[key];
997
+ const insertNode = {
998
+ depKey: "",
999
+ children: /* @__PURE__ */ new Map(),
1000
+ ...siblingSelections && { selections: siblingSelections }
1001
+ };
1002
+ if (entity) {
1003
+ const { data } = partialDenormalize(insertNode, entity, [...entry.path, idx], rebuiltDepKeys, storage, subscriptions, entry.subscription);
1004
+ node.children.set(String(idx), insertNode);
1005
+ patches.push({
1006
+ type: "splice",
1007
+ path: entry.path,
1008
+ index: idx,
1009
+ deleteCount: 0,
1010
+ items: [data]
1011
+ });
1012
+ } else {
1013
+ node.children.set(String(idx), insertNode);
1014
+ patches.push({
1015
+ type: "splice",
1016
+ path: entry.path,
1017
+ index: idx,
1018
+ deleteCount: 0,
1019
+ items: [null]
1020
+ });
1021
+ }
1022
+ }
1023
+ rebuildArrayIndices(node, entry, subscriptions);
1024
+ return patches;
1025
+ }
1026
+ return patches;
1027
+ };
1028
+ const compactChildren = (node) => {
1029
+ const sorted = [...node.children.entries()].toSorted(([a], [b]) => Number(a) - Number(b));
1030
+ node.children.clear();
1031
+ for (const [i, element] of sorted.entries()) node.children.set(String(i), element[1]);
1032
+ };
1033
+ const findSiblingSelections = (node) => {
1034
+ for (const child of node.children.values()) if (child.selections) return child.selections;
1035
+ return node.selections;
1036
+ };
1037
+ const shiftChildrenRight = (node, fromIndex) => {
1038
+ const entries = [...node.children.entries()].toSorted(([a], [b]) => Number(a) - Number(b));
1039
+ node.children.clear();
1040
+ for (const [key, child] of entries) {
1041
+ const idx = Number(key);
1042
+ if (idx >= fromIndex) node.children.set(String(idx + 1), child);
1043
+ else node.children.set(key, child);
1044
+ }
1045
+ };
1046
+ /**
1047
+ * @internal
1048
+ */
1049
+ const generatePatches = (changedKeys, subscriptions, storage) => {
1050
+ const patchesBySubscription = /* @__PURE__ */ new Map();
1051
+ const rebuiltDepKeys = /* @__PURE__ */ new Set();
1052
+ const { structural, scalar } = classifyChanges(changedKeys);
1053
+ for (const { depKey, oldValue, newValue } of structural) {
1054
+ const entries = subscriptions.get(depKey);
1055
+ if (!entries) continue;
1056
+ for (const entry of entries) {
1057
+ const node = findEntryTreeNode(entry.subscription.entryTree, entry.path);
1058
+ if (!node) continue;
1059
+ const patches = processStructuralChange(entry, node, oldValue, newValue, rebuiltDepKeys, storage, subscriptions);
1060
+ if (patches.length > 0) {
1061
+ const existing = patchesBySubscription.get(entry.subscription) ?? [];
1062
+ existing.push(...patches);
1063
+ patchesBySubscription.set(entry.subscription, existing);
1064
+ }
1065
+ }
1066
+ }
1067
+ for (const { depKey, newValue } of scalar) {
1068
+ if (rebuiltDepKeys.has(depKey)) continue;
1069
+ const entries = subscriptions.get(depKey);
1070
+ if (!entries) continue;
1071
+ for (const entry of entries) {
1072
+ const existing = patchesBySubscription.get(entry.subscription) ?? [];
1073
+ existing.push({
1074
+ type: "set",
1075
+ path: entry.path,
1076
+ value: newValue
1077
+ });
1078
+ patchesBySubscription.set(entry.subscription, existing);
1079
+ }
1080
+ }
1081
+ return patchesBySubscription;
1082
+ };
1083
+
631
1084
  //#endregion
632
1085
  //#region src/cache/cache.ts
633
1086
  /**
@@ -638,7 +1091,6 @@ var Cache = class {
638
1091
  #schemaMeta;
639
1092
  #storage = { [RootFieldKey]: {} };
640
1093
  #subscriptions = /* @__PURE__ */ new Map();
641
- #memo = /* @__PURE__ */ new Map();
642
1094
  #stale = /* @__PURE__ */ new Set();
643
1095
  #optimisticKeys = [];
644
1096
  #optimisticLayers = /* @__PURE__ */ new Map();
@@ -673,22 +1125,35 @@ var Cache = class {
673
1125
  */
674
1126
  writeOptimistic(key, artifact, variables, data) {
675
1127
  const layerStorage = { [RootFieldKey]: {} };
676
- const dependencies = /* @__PURE__ */ new Set();
1128
+ const layerDependencies = /* @__PURE__ */ new Set();
677
1129
  normalize(this.#schemaMeta, artifact.selections, layerStorage, data, variables, (storageKey, fieldKey) => {
678
- dependencies.add(makeDependencyKey(storageKey, fieldKey));
1130
+ layerDependencies.add(makeDependencyKey(storageKey, fieldKey));
679
1131
  });
1132
+ const oldValues = /* @__PURE__ */ new Map();
1133
+ const currentView = this.#getStorageView();
1134
+ for (const depKey of layerDependencies) {
1135
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1136
+ oldValues.set(depKey, currentView[sk]?.[fk]);
1137
+ }
680
1138
  this.#optimisticKeys.push(key);
681
1139
  this.#optimisticLayers.set(key, {
682
1140
  storage: layerStorage,
683
- dependencies
1141
+ dependencies: layerDependencies
684
1142
  });
685
1143
  this.#storageView = null;
686
- const subscriptions = /* @__PURE__ */ new Set();
687
- for (const depKey of dependencies) {
688
- const ss = this.#subscriptions.get(depKey);
689
- if (ss) for (const s of ss) subscriptions.add(s);
1144
+ const newView = this.#getStorageView();
1145
+ const changedKeys = /* @__PURE__ */ new Map();
1146
+ for (const depKey of layerDependencies) {
1147
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1148
+ const newVal = newView[sk]?.[fk];
1149
+ const oldVal = oldValues.get(depKey);
1150
+ if (oldVal !== newVal) changedKeys.set(depKey, {
1151
+ oldValue: oldVal,
1152
+ newValue: newVal
1153
+ });
690
1154
  }
691
- for (const subscription of subscriptions) subscription.listener();
1155
+ const patchesBySubscription = generatePatches(changedKeys, this.#subscriptions, newView);
1156
+ for (const [subscription, patches] of patchesBySubscription) subscription.listener(patches);
692
1157
  }
693
1158
  /**
694
1159
  * Removes an optimistic layer and notifies affected subscribers.
@@ -698,42 +1163,71 @@ var Cache = class {
698
1163
  removeOptimistic(key) {
699
1164
  const layer = this.#optimisticLayers.get(key);
700
1165
  if (!layer) return;
1166
+ const currentView = this.#getStorageView();
1167
+ const oldValues = /* @__PURE__ */ new Map();
1168
+ for (const depKey of layer.dependencies) {
1169
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1170
+ oldValues.set(depKey, currentView[sk]?.[fk]);
1171
+ }
701
1172
  this.#optimisticLayers.delete(key);
702
1173
  this.#optimisticKeys = this.#optimisticKeys.filter((k) => k !== key);
703
1174
  this.#storageView = null;
704
- const subscriptions = /* @__PURE__ */ new Set();
1175
+ const newView = this.#getStorageView();
1176
+ const changedKeys = /* @__PURE__ */ new Map();
705
1177
  for (const depKey of layer.dependencies) {
706
- const ss = this.#subscriptions.get(depKey);
707
- if (ss) for (const s of ss) subscriptions.add(s);
1178
+ const { storageKey: sk, fieldKey: fk } = this.#parseDepKey(depKey);
1179
+ const newVal = newView[sk]?.[fk];
1180
+ const oldVal = oldValues.get(depKey);
1181
+ if (oldVal !== newVal) changedKeys.set(depKey, {
1182
+ oldValue: oldVal,
1183
+ newValue: newVal
1184
+ });
708
1185
  }
709
- for (const subscription of subscriptions) subscription.listener();
1186
+ const patchesBySubscription = generatePatches(changedKeys, this.#subscriptions, newView);
1187
+ for (const [subscription, patches] of patchesBySubscription) subscription.listener(patches);
710
1188
  }
711
1189
  /**
712
1190
  * Writes a query result to the cache, normalizing entities.
1191
+ * In addition to field-level stale clearing, this also clears entity-level stale entries
1192
+ * (e.g., `"User:1"`) when any field of that entity is written, because {@link invalidate}
1193
+ * supports entity-level invalidation without specifying a field.
713
1194
  * @param artifact - GraphQL document artifact.
714
1195
  * @param variables - Query variables.
715
1196
  * @param data - Query result data.
716
1197
  */
717
1198
  writeQuery(artifact, variables, data) {
718
- const dependencies = /* @__PURE__ */ new Set();
719
- const subscriptions = /* @__PURE__ */ new Set();
1199
+ const changedKeys = /* @__PURE__ */ new Map();
1200
+ const staleClearedKeys = /* @__PURE__ */ new Set();
720
1201
  const entityStaleCleared = /* @__PURE__ */ new Set();
721
1202
  normalize(this.#schemaMeta, artifact.selections, this.#storage, data, variables, (storageKey, fieldKey, oldValue, newValue) => {
722
1203
  const depKey = makeDependencyKey(storageKey, fieldKey);
723
- if (this.#stale.delete(depKey)) dependencies.add(depKey);
1204
+ if (this.#stale.delete(depKey)) staleClearedKeys.add(depKey);
724
1205
  if (!entityStaleCleared.has(storageKey) && this.#stale.delete(storageKey)) entityStaleCleared.add(storageKey);
725
- if (oldValue !== newValue) dependencies.add(depKey);
1206
+ if (oldValue !== newValue) changedKeys.set(depKey, {
1207
+ oldValue,
1208
+ newValue
1209
+ });
726
1210
  });
727
- for (const entityKey of entityStaleCleared) this.#collectSubscriptions(entityKey, void 0, subscriptions);
728
- for (const dependency of dependencies) {
729
- const ss = this.#subscriptions.get(dependency);
730
- if (ss) for (const s of ss) subscriptions.add(s);
1211
+ const patchesBySubscription = generatePatches(changedKeys, this.#subscriptions, this.#storage);
1212
+ for (const [subscription, patches] of patchesBySubscription) subscription.listener(patches);
1213
+ const staleOnlySubscriptions = /* @__PURE__ */ new Set();
1214
+ for (const depKey of staleClearedKeys) {
1215
+ if (changedKeys.has(depKey)) continue;
1216
+ const entries = this.#subscriptions.get(depKey);
1217
+ if (entries) {
1218
+ for (const entry of entries) if (!patchesBySubscription.has(entry.subscription)) staleOnlySubscriptions.add(entry.subscription);
1219
+ }
1220
+ }
1221
+ for (const entityKey of entityStaleCleared) {
1222
+ const prefix = `${entityKey}.`;
1223
+ for (const [depKey, entries] of this.#subscriptions) if (depKey.startsWith(prefix)) {
1224
+ for (const entry of entries) if (!patchesBySubscription.has(entry.subscription)) staleOnlySubscriptions.add(entry.subscription);
1225
+ }
731
1226
  }
732
- for (const subscription of subscriptions) subscription.listener();
1227
+ for (const subscription of staleOnlySubscriptions) subscription.listener(null);
733
1228
  }
734
1229
  /**
735
1230
  * Reads a query result from the cache, denormalizing entities if available.
736
- * Uses structural sharing to preserve referential identity for unchanged subtrees.
737
1231
  * @param artifact - GraphQL document artifact.
738
1232
  * @param variables - Query variables.
739
1233
  * @returns Denormalized query result or null if not found.
@@ -748,74 +1242,170 @@ var Cache = class {
748
1242
  data: null,
749
1243
  stale: false
750
1244
  };
751
- const key = makeMemoKey("query", artifact.name, stringify(variables));
752
- const prev = this.#memo.get(key);
753
- const result = prev === void 0 ? data : replaceEqualDeep(prev, data);
754
- this.#memo.set(key, result);
755
1245
  return {
756
- data: result,
1246
+ data,
757
1247
  stale
758
1248
  };
759
1249
  }
760
1250
  /**
761
- * Subscribes to cache invalidations for a specific query.
1251
+ * Subscribes to cache changes for a specific query.
762
1252
  * @param artifact - GraphQL document artifact.
763
1253
  * @param variables - Query variables.
764
- * @param listener - Callback function to invoke on cache invalidation.
765
- * @returns Unsubscribe function.
1254
+ * @param listener - Callback function to invoke on cache changes.
1255
+ * @returns Object containing initial data, stale status, unsubscribe function, and subscription.
766
1256
  */
767
1257
  subscribeQuery(artifact, variables, listener) {
768
- const dependencies = /* @__PURE__ */ new Set();
1258
+ let stale = false;
1259
+ const tuples = [];
769
1260
  const storageView = this.#getStorageView();
770
- denormalize(artifact.selections, storageView, storageView[RootFieldKey], variables, (storageKey, fieldKey) => {
771
- const dependencyKey = makeDependencyKey(storageKey, fieldKey);
772
- dependencies.add(dependencyKey);
773
- });
774
- return this.#subscribe(dependencies, listener);
1261
+ const { data, partial } = denormalize(artifact.selections, storageView, storageView[RootFieldKey], variables, (storageKey, fieldKey, path, selections) => {
1262
+ tuples.push({
1263
+ storageKey,
1264
+ fieldKey,
1265
+ path,
1266
+ selections
1267
+ });
1268
+ if (this.#stale.has(storageKey) || this.#stale.has(makeDependencyKey(storageKey, fieldKey))) stale = true;
1269
+ }, { trackFragmentDeps: false });
1270
+ const entryTree = buildEntryTree(tuples);
1271
+ const subscription = {
1272
+ listener,
1273
+ selections: artifact.selections,
1274
+ variables,
1275
+ entryTree
1276
+ };
1277
+ for (const tuple of tuples) {
1278
+ const depKey = makeDependencyKey(tuple.storageKey, tuple.fieldKey);
1279
+ const entry = {
1280
+ path: tuple.path,
1281
+ subscription
1282
+ };
1283
+ let entrySet = this.#subscriptions.get(depKey);
1284
+ if (!entrySet) {
1285
+ entrySet = /* @__PURE__ */ new Set();
1286
+ this.#subscriptions.set(depKey, entrySet);
1287
+ }
1288
+ entrySet.add(entry);
1289
+ }
1290
+ const unsubscribe = () => {
1291
+ this.#removeSubscriptionFromTree(entryTree, subscription);
1292
+ };
1293
+ return {
1294
+ data: partial ? null : data,
1295
+ stale,
1296
+ unsubscribe,
1297
+ subscription
1298
+ };
775
1299
  }
776
1300
  /**
777
1301
  * Reads a fragment from the cache for a specific entity.
778
- * Uses structural sharing to preserve referential identity for unchanged subtrees.
779
1302
  * @param artifact - GraphQL fragment artifact.
780
1303
  * @param fragmentRef - Fragment reference containing entity key.
781
1304
  * @returns Denormalized fragment data or null if not found or invalid.
782
1305
  */
783
1306
  readFragment(artifact, fragmentRef) {
784
- const entityKey = fragmentRef[FragmentRefKey];
1307
+ const storageKey = fragmentRef[FragmentRefKey];
785
1308
  const fragmentVars = getFragmentVars(fragmentRef, artifact.name);
786
1309
  const storageView = this.#getStorageView();
787
- if (!storageView[entityKey]) return {
1310
+ let stale = false;
1311
+ const value = storageView[storageKey];
1312
+ if (!value) return {
788
1313
  data: null,
789
1314
  stale: false
790
1315
  };
791
- let stale = false;
792
- const { data, partial } = denormalize(artifact.selections, storageView, { [EntityLinkKey]: entityKey }, fragmentVars, (storageKey, fieldKey) => {
793
- if (this.#stale.has(storageKey) || this.#stale.has(makeDependencyKey(storageKey, fieldKey))) stale = true;
1316
+ const { data, partial } = denormalize(artifact.selections, storageView, storageKey === RootFieldKey ? value : { [EntityLinkKey]: storageKey }, fragmentVars, (sk, fieldKey) => {
1317
+ if (this.#stale.has(sk) || this.#stale.has(makeDependencyKey(sk, fieldKey))) stale = true;
794
1318
  });
795
1319
  if (partial) return {
796
1320
  data: null,
797
1321
  stale: false
798
1322
  };
799
- const argsId = Object.keys(fragmentVars).length > 0 ? entityKey + stringify(fragmentVars) : entityKey;
800
- const key = makeMemoKey("fragment", artifact.name, argsId);
801
- const prev = this.#memo.get(key);
802
- const result = prev === void 0 ? data : replaceEqualDeep(prev, data);
803
- this.#memo.set(key, result);
804
1323
  return {
805
- data: result,
1324
+ data,
806
1325
  stale
807
1326
  };
808
1327
  }
1328
+ /**
1329
+ * Subscribes to cache changes for a specific fragment.
1330
+ * @param artifact - GraphQL fragment artifact.
1331
+ * @param fragmentRef - Fragment reference containing entity key.
1332
+ * @param listener - Callback function to invoke on cache changes.
1333
+ * @returns Object containing initial data, stale status, unsubscribe function, and subscription.
1334
+ */
809
1335
  subscribeFragment(artifact, fragmentRef, listener) {
810
- const entityKey = fragmentRef[FragmentRefKey];
1336
+ const storageKey = fragmentRef[FragmentRefKey];
811
1337
  const fragmentVars = getFragmentVars(fragmentRef, artifact.name);
812
- const dependencies = /* @__PURE__ */ new Set();
813
1338
  const storageView = this.#getStorageView();
814
- denormalize(artifact.selections, storageView, { [EntityLinkKey]: entityKey }, fragmentVars, (storageKey, fieldKey) => {
815
- const dependencyKey = makeDependencyKey(storageKey, fieldKey);
816
- dependencies.add(dependencyKey);
817
- });
818
- return this.#subscribe(dependencies, listener);
1339
+ const value = storageKey === RootFieldKey ? storageView[RootFieldKey] : storageView[storageKey];
1340
+ if (!value) {
1341
+ const entryTree = buildEntryTree([]);
1342
+ return {
1343
+ data: null,
1344
+ stale: false,
1345
+ unsubscribe: () => {},
1346
+ subscription: {
1347
+ listener,
1348
+ selections: artifact.selections,
1349
+ variables: fragmentVars,
1350
+ entryTree
1351
+ }
1352
+ };
1353
+ }
1354
+ let stale = false;
1355
+ const tuples = [];
1356
+ const denormalizeValue = storageKey === RootFieldKey ? value : { [EntityLinkKey]: storageKey };
1357
+ const { data, partial } = denormalize(artifact.selections, storageView, denormalizeValue, fragmentVars, (sk, fieldKey, path, selections) => {
1358
+ tuples.push({
1359
+ storageKey: sk,
1360
+ fieldKey,
1361
+ path,
1362
+ selections
1363
+ });
1364
+ if (this.#stale.has(sk) || this.#stale.has(makeDependencyKey(sk, fieldKey))) stale = true;
1365
+ }, { trackFragmentDeps: false });
1366
+ if (partial) {
1367
+ const entryTree = buildEntryTree([]);
1368
+ return {
1369
+ data: null,
1370
+ stale: false,
1371
+ unsubscribe: () => {},
1372
+ subscription: {
1373
+ listener,
1374
+ selections: artifact.selections,
1375
+ variables: fragmentVars,
1376
+ entryTree
1377
+ }
1378
+ };
1379
+ }
1380
+ const entryTree = buildEntryTree(tuples, storageKey === RootFieldKey ? void 0 : storageKey);
1381
+ const subscription = {
1382
+ listener,
1383
+ selections: artifact.selections,
1384
+ variables: fragmentVars,
1385
+ entryTree
1386
+ };
1387
+ for (const tuple of tuples) {
1388
+ const depKey = makeDependencyKey(tuple.storageKey, tuple.fieldKey);
1389
+ const entry = {
1390
+ path: tuple.path,
1391
+ subscription
1392
+ };
1393
+ let entrySet = this.#subscriptions.get(depKey);
1394
+ if (!entrySet) {
1395
+ entrySet = /* @__PURE__ */ new Set();
1396
+ this.#subscriptions.set(depKey, entrySet);
1397
+ }
1398
+ entrySet.add(entry);
1399
+ }
1400
+ const unsubscribe = () => {
1401
+ this.#removeSubscriptionFromTree(entryTree, subscription);
1402
+ };
1403
+ return {
1404
+ data: partial ? null : data,
1405
+ stale,
1406
+ unsubscribe,
1407
+ subscription
1408
+ };
819
1409
  }
820
1410
  readFragments(artifact, fragmentRefs) {
821
1411
  const results = [];
@@ -829,42 +1419,35 @@ var Cache = class {
829
1419
  if (result.stale) stale = true;
830
1420
  results.push(result.data);
831
1421
  }
832
- const entityKeys = fragmentRefs.map((ref) => ref[FragmentRefKey]);
833
- const key = makeMemoKey("fragments", artifact.name, entityKeys.join(","));
834
- const prev = this.#memo.get(key);
835
- const result = prev === void 0 ? results : replaceEqualDeep(prev, results);
836
- this.#memo.set(key, result);
837
1422
  return {
838
- data: result,
1423
+ data: results,
839
1424
  stale
840
1425
  };
841
1426
  }
842
1427
  subscribeFragments(artifact, fragmentRefs, listener) {
843
- const dependencies = /* @__PURE__ */ new Set();
844
- const storageView = this.#getStorageView();
1428
+ const unsubscribes = [];
845
1429
  for (const ref of fragmentRefs) {
846
- const entityKey = ref[FragmentRefKey];
847
- const fragmentVars = getFragmentVars(ref, artifact.name);
848
- denormalize(artifact.selections, storageView, { [EntityLinkKey]: entityKey }, fragmentVars, (storageKey, fieldKey) => {
849
- dependencies.add(makeDependencyKey(storageKey, fieldKey));
850
- });
1430
+ const { unsubscribe } = this.subscribeFragment(artifact, ref, listener);
1431
+ unsubscribes.push(unsubscribe);
851
1432
  }
852
- return this.#subscribe(dependencies, listener);
1433
+ return () => {
1434
+ for (const unsub of unsubscribes) unsub();
1435
+ };
853
1436
  }
854
1437
  /**
855
1438
  * Invalidates one or more cache entries and notifies affected subscribers.
856
1439
  * @param targets - Cache entries to invalidate.
857
1440
  */
858
1441
  invalidate(...targets) {
859
- const subscriptions = /* @__PURE__ */ new Set();
1442
+ const affectedSubscriptions = /* @__PURE__ */ new Set();
860
1443
  for (const target of targets) if (target.__typename === "Query") if ("$field" in target) {
861
1444
  const fieldKey = makeFieldKeyFromArgs(target.$field, target.$args);
862
1445
  const depKey = makeDependencyKey(RootFieldKey, fieldKey);
863
1446
  this.#stale.add(depKey);
864
- this.#collectSubscriptions(RootFieldKey, fieldKey, subscriptions);
1447
+ this.#collectSubscriptions(RootFieldKey, fieldKey, affectedSubscriptions);
865
1448
  } else {
866
1449
  this.#stale.add(RootFieldKey);
867
- this.#collectSubscriptions(RootFieldKey, void 0, subscriptions);
1450
+ this.#collectSubscriptions(RootFieldKey, void 0, affectedSubscriptions);
868
1451
  }
869
1452
  else {
870
1453
  const keyFields = this.#schemaMeta.entities[target.__typename]?.keyFields;
@@ -874,10 +1457,10 @@ var Cache = class {
874
1457
  if ("$field" in target) {
875
1458
  const fieldKey = makeFieldKeyFromArgs(target.$field, target.$args);
876
1459
  this.#stale.add(makeDependencyKey(entityKey, fieldKey));
877
- this.#collectSubscriptions(entityKey, fieldKey, subscriptions);
1460
+ this.#collectSubscriptions(entityKey, fieldKey, affectedSubscriptions);
878
1461
  } else {
879
1462
  this.#stale.add(entityKey);
880
- this.#collectSubscriptions(entityKey, void 0, subscriptions);
1463
+ this.#collectSubscriptions(entityKey, void 0, affectedSubscriptions);
881
1464
  }
882
1465
  } else {
883
1466
  const prefix = `${target.__typename}:`;
@@ -886,15 +1469,30 @@ var Cache = class {
886
1469
  if ("$field" in target) {
887
1470
  const fieldKey = makeFieldKeyFromArgs(target.$field, target.$args);
888
1471
  this.#stale.add(makeDependencyKey(entityKey, fieldKey));
889
- this.#collectSubscriptions(entityKey, fieldKey, subscriptions);
1472
+ this.#collectSubscriptions(entityKey, fieldKey, affectedSubscriptions);
890
1473
  } else {
891
1474
  this.#stale.add(entityKey);
892
- this.#collectSubscriptions(entityKey, void 0, subscriptions);
1475
+ this.#collectSubscriptions(entityKey, void 0, affectedSubscriptions);
893
1476
  }
894
1477
  }
895
1478
  }
896
1479
  }
897
- for (const subscription of subscriptions) subscription.listener();
1480
+ for (const subscription of affectedSubscriptions) subscription.listener(null);
1481
+ }
1482
+ /**
1483
+ * Checks if a subscription has stale data.
1484
+ * @internal
1485
+ */
1486
+ isStale(subscription) {
1487
+ const check = (node) => {
1488
+ if (node.depKey.includes("@")) {
1489
+ const { storageKey } = parseDependencyKey(node.depKey);
1490
+ if (this.#stale.has(storageKey) || this.#stale.has(node.depKey)) return true;
1491
+ }
1492
+ for (const child of node.children.values()) if (check(child)) return true;
1493
+ return false;
1494
+ };
1495
+ return check(subscription.entryTree);
898
1496
  }
899
1497
  #hasKeyFields(target, keyFields) {
900
1498
  return keyFields.every((f) => f in target);
@@ -902,48 +1500,43 @@ var Cache = class {
902
1500
  #collectSubscriptions(storageKey, fieldKey, out) {
903
1501
  if (fieldKey === void 0) {
904
1502
  const prefix = `${storageKey}.`;
905
- for (const [depKey, ss] of this.#subscriptions) if (depKey.startsWith(prefix)) for (const s of ss) out.add(s);
1503
+ for (const [depKey, entries] of this.#subscriptions) if (depKey.startsWith(prefix)) for (const entry of entries) out.add(entry.subscription);
906
1504
  } else {
907
1505
  const depKey = makeDependencyKey(storageKey, fieldKey);
908
- const ss = this.#subscriptions.get(depKey);
909
- if (ss) for (const s of ss) out.add(s);
1506
+ const entries = this.#subscriptions.get(depKey);
1507
+ if (entries) for (const entry of entries) out.add(entry.subscription);
910
1508
  }
911
1509
  }
912
- #subscribe(dependencies, listener) {
913
- const subscription = { listener };
914
- for (const dependency of dependencies) {
915
- const subscriptions = this.#subscriptions.get(dependency) ?? /* @__PURE__ */ new Set();
916
- subscriptions.add(subscription);
917
- this.#subscriptions.set(dependency, subscriptions);
918
- }
919
- return () => {
920
- for (const dependency of dependencies) {
921
- const subscriptions = this.#subscriptions.get(dependency);
922
- subscriptions?.delete(subscription);
923
- if (subscriptions?.size === 0) this.#subscriptions.delete(dependency);
1510
+ #removeSubscriptionFromTree(node, subscription) {
1511
+ const entries = this.#subscriptions.get(node.depKey);
1512
+ if (entries) {
1513
+ for (const entry of entries) if (entry.subscription === subscription) {
1514
+ entries.delete(entry);
1515
+ break;
924
1516
  }
925
- };
1517
+ if (entries.size === 0) this.#subscriptions.delete(node.depKey);
1518
+ }
1519
+ for (const child of node.children.values()) this.#removeSubscriptionFromTree(child, subscription);
1520
+ }
1521
+ #parseDepKey(depKey) {
1522
+ return parseDependencyKey(depKey);
926
1523
  }
927
1524
  /**
928
- * Extracts a serializable snapshot of the cache storage and structural sharing state.
1525
+ * Extracts a serializable snapshot of the cache storage.
929
1526
  * Optimistic layers are excluded because they represent transient in-flight state.
930
1527
  */
931
1528
  extract() {
932
- return {
933
- storage: structuredClone(this.#storage),
934
- memo: Object.fromEntries(this.#memo)
935
- };
1529
+ return { storage: structuredClone(this.#storage) };
936
1530
  }
937
1531
  /**
938
1532
  * Hydrates the cache with a previously extracted snapshot.
939
1533
  */
940
1534
  hydrate(snapshot) {
941
- const { storage, memo } = snapshot;
1535
+ const { storage } = snapshot;
942
1536
  for (const [key, fields] of Object.entries(storage)) this.#storage[key] = {
943
1537
  ...this.#storage[key],
944
1538
  ...fields
945
1539
  };
946
- for (const [key, value] of Object.entries(memo)) this.#memo.set(key, value);
947
1540
  this.#storageView = null;
948
1541
  }
949
1542
  /**
@@ -952,7 +1545,6 @@ var Cache = class {
952
1545
  clear() {
953
1546
  this.#storage = { [RootFieldKey]: {} };
954
1547
  this.#subscriptions.clear();
955
- this.#memo.clear();
956
1548
  this.#stale.clear();
957
1549
  this.#optimisticKeys = [];
958
1550
  this.#optimisticLayers.clear();
@@ -988,6 +1580,9 @@ const cacheExchange = (options = {}) => {
988
1580
  clear: () => cache.clear()
989
1581
  },
990
1582
  io: (ops$) => {
1583
+ const subscriptionHasData = /* @__PURE__ */ new Map();
1584
+ const resubscribe$ = makeSubject();
1585
+ const refetch$ = makeSubject();
991
1586
  const fragment$ = pipe(ops$, filter((op) => op.variant === "request" && op.artifact.kind === "fragment"), mergeMap((op) => {
992
1587
  const fragmentRef = op.metadata?.fragment?.ref;
993
1588
  if (!fragmentRef) return fromValue({
@@ -995,77 +1590,152 @@ const cacheExchange = (options = {}) => {
995
1590
  errors: [new ExchangeError("Fragment operation missing fragment.ref in metadata. This usually happens when the wrong fragment reference was passed.", { exchangeName: "cache" })]
996
1591
  });
997
1592
  if (isFragmentRefArray(fragmentRef)) {
998
- const trigger = makeSubject();
999
- const teardown$ = pipe(ops$, filter((operation) => operation.variant === "teardown" && operation.key === op.key), tap(() => trigger.complete()));
1000
- return pipe(merge(fromValue(void 0), trigger.source), switchMap(() => fromSubscription(() => cache.readFragments(op.artifact, fragmentRef), () => cache.subscribeFragments(op.artifact, fragmentRef, async () => {
1001
- await Promise.resolve();
1002
- trigger.next();
1003
- }))), takeUntil(teardown$), map(({ data, stale }) => ({
1593
+ const results = makeSubject();
1594
+ const unsubscribes = [];
1595
+ const fragmentSubscriptions = [];
1596
+ for (const [index, ref] of fragmentRef.entries()) {
1597
+ const patchListener = (patches) => {
1598
+ if (patches) {
1599
+ const indexedPatches = patches.map((patch) => ({
1600
+ ...patch,
1601
+ path: [index, ...patch.path]
1602
+ }));
1603
+ results.next({
1604
+ operation: op,
1605
+ metadata: { cache: { patches: indexedPatches } },
1606
+ errors: []
1607
+ });
1608
+ } else {
1609
+ const sub = fragmentSubscriptions[index];
1610
+ if (sub && cache.isStale(sub)) {
1611
+ const { data, stale } = cache.readFragments(op.artifact, fragmentRef);
1612
+ if (data !== null) results.next({
1613
+ operation: op,
1614
+ data,
1615
+ ...stale && { metadata: { cache: { stale: true } } },
1616
+ errors: []
1617
+ });
1618
+ }
1619
+ }
1620
+ };
1621
+ const { unsubscribe, subscription } = cache.subscribeFragment(op.artifact, ref, patchListener);
1622
+ unsubscribes.push(unsubscribe);
1623
+ fragmentSubscriptions.push(subscription);
1624
+ }
1625
+ const { data: initialData, stale: initialStale } = cache.readFragments(op.artifact, fragmentRef);
1626
+ const teardown$ = pipe(ops$, filter((operation) => operation.variant === "teardown" && operation.key === op.key), tap(() => {
1627
+ for (const unsub of unsubscribes) unsub();
1628
+ results.complete();
1629
+ }));
1630
+ return pipe(merge(fromValue({
1004
1631
  operation: op,
1005
- data,
1006
- ...stale && { metadata: { cache: { stale: true } } },
1632
+ data: initialData,
1633
+ ...initialStale && { metadata: { cache: { stale: true } } },
1007
1634
  errors: []
1008
- })));
1635
+ }), results.source), takeUntil(teardown$));
1009
1636
  }
1010
1637
  if (!isFragmentRef(fragmentRef)) return fromValue({
1011
1638
  operation: op,
1012
1639
  data: fragmentRef,
1013
1640
  errors: []
1014
1641
  });
1015
- const trigger = makeSubject();
1016
- const teardown$ = pipe(ops$, filter((operation) => operation.variant === "teardown" && operation.key === op.key), tap(() => trigger.complete()));
1017
- return pipe(merge(fromValue(void 0), trigger.source), switchMap(() => fromSubscription(() => cache.readFragment(op.artifact, fragmentRef), () => cache.subscribeFragment(op.artifact, fragmentRef, async () => {
1018
- await Promise.resolve();
1019
- trigger.next();
1020
- }))), takeUntil(teardown$), map(({ data, stale }) => ({
1642
+ const results = makeSubject();
1643
+ let currentUnsubscribe = null;
1644
+ let currentSubscription = null;
1645
+ const patchListener = (patches) => {
1646
+ if (patches) results.next({
1647
+ operation: op,
1648
+ metadata: { cache: { patches } },
1649
+ errors: []
1650
+ });
1651
+ else if (currentSubscription) {
1652
+ if (cache.isStale(currentSubscription)) {
1653
+ const { data: staleData } = cache.readFragment(op.artifact, fragmentRef);
1654
+ if (staleData !== null) results.next({
1655
+ operation: op,
1656
+ data: staleData,
1657
+ metadata: { cache: { stale: true } },
1658
+ errors: []
1659
+ });
1660
+ }
1661
+ }
1662
+ };
1663
+ const { data, stale, unsubscribe, subscription } = cache.subscribeFragment(op.artifact, fragmentRef, patchListener);
1664
+ currentUnsubscribe = unsubscribe;
1665
+ currentSubscription = subscription;
1666
+ const teardown$ = pipe(ops$, filter((operation) => operation.variant === "teardown" && operation.key === op.key), tap(() => {
1667
+ if (currentUnsubscribe) currentUnsubscribe();
1668
+ results.complete();
1669
+ }));
1670
+ return pipe(merge(data === null ? empty() : fromValue({
1021
1671
  operation: op,
1022
1672
  data,
1023
1673
  ...stale && { metadata: { cache: { stale: true } } },
1024
1674
  errors: []
1025
- })));
1675
+ }), results.source), takeUntil(teardown$));
1026
1676
  }));
1027
1677
  const nonCache$ = pipe(ops$, filter((op) => op.variant === "request" && (op.artifact.kind === "mutation" || op.artifact.kind === "subscription" || op.artifact.kind === "query" && fetchPolicy === "network-only")), tap((op) => {
1028
1678
  if (op.artifact.kind === "mutation" && op.metadata?.cache?.optimisticResponse) cache.writeOptimistic(op.key, op.artifact, op.variables, op.metadata.cache.optimisticResponse);
1029
1679
  }));
1030
1680
  const query$ = pipe(ops$, filter((op) => op.variant === "request" && op.artifact.kind === "query" && fetchPolicy !== "network-only"), share());
1031
- const refetch$ = makeSubject();
1032
1681
  return merge(fragment$, pipe(query$, mergeMap((op) => {
1033
- const trigger = makeSubject();
1034
- let hasData = false;
1035
- const teardown$ = pipe(ops$, filter((operation) => operation.variant === "teardown" && operation.key === op.key), tap(() => trigger.complete()));
1036
- return pipe(merge(fromValue(void 0), trigger.source), switchMap(() => fromSubscription(() => cache.readQuery(op.artifact, op.variables), () => cache.subscribeQuery(op.artifact, op.variables, async () => {
1037
- await Promise.resolve();
1038
- trigger.next();
1039
- }))), takeUntil(teardown$), mergeMap(({ data, stale }) => {
1040
- if (data !== null && !stale) {
1041
- hasData = true;
1042
- return fromValue({
1043
- operation: op,
1044
- data,
1045
- errors: []
1046
- });
1047
- }
1048
- if (data !== null && stale) {
1049
- hasData = true;
1050
- refetch$.next(op);
1051
- return fromValue({
1052
- operation: op,
1053
- data,
1054
- metadata: { cache: { stale: true } },
1055
- errors: []
1056
- });
1057
- }
1058
- if (hasData) {
1059
- refetch$.next(op);
1060
- return empty();
1061
- }
1062
- if (fetchPolicy === "cache-only") return fromValue({
1063
- operation: op,
1064
- data: null,
1065
- errors: []
1066
- });
1682
+ const results = makeSubject();
1683
+ let currentUnsubscribe = null;
1684
+ let currentSubscription = null;
1685
+ let initialized = false;
1686
+ const doSubscribe = () => {
1687
+ if (currentUnsubscribe) currentUnsubscribe();
1688
+ const patchListener = (patches) => {
1689
+ if (patches) {
1690
+ if (!initialized) return;
1691
+ results.next({
1692
+ operation: op,
1693
+ metadata: { cache: { patches } },
1694
+ errors: []
1695
+ });
1696
+ } else if (currentSubscription) {
1697
+ if (cache.isStale(currentSubscription)) {
1698
+ const { data: staleData } = cache.readQuery(op.artifact, op.variables);
1699
+ if (staleData !== null) results.next({
1700
+ operation: op,
1701
+ data: staleData,
1702
+ metadata: { cache: { stale: true } },
1703
+ errors: []
1704
+ });
1705
+ refetch$.next(op);
1706
+ }
1707
+ }
1708
+ };
1709
+ const result = cache.subscribeQuery(op.artifact, op.variables, patchListener);
1710
+ currentUnsubscribe = result.unsubscribe;
1711
+ currentSubscription = result.subscription;
1712
+ return result;
1713
+ };
1714
+ const { data, stale } = doSubscribe();
1715
+ subscriptionHasData.set(op.key, data !== null);
1716
+ if (data !== null) initialized = true;
1717
+ const teardown$ = pipe(ops$, filter((o) => o.variant === "teardown" && o.key === op.key), tap(() => {
1718
+ if (currentUnsubscribe) currentUnsubscribe();
1719
+ subscriptionHasData.delete(op.key);
1720
+ results.complete();
1721
+ }));
1722
+ const resubStream$ = pipe(resubscribe$.source, filter((key) => key === op.key), mergeMap(() => {
1723
+ doSubscribe();
1724
+ initialized = true;
1067
1725
  return empty();
1068
1726
  }));
1727
+ const stream$ = pipe(merge(data === null ? fetchPolicy === "cache-only" ? fromValue({
1728
+ operation: op,
1729
+ data: null,
1730
+ errors: []
1731
+ }) : empty() : fromValue({
1732
+ operation: op,
1733
+ data,
1734
+ ...stale && { metadata: { cache: { stale: true } } },
1735
+ errors: []
1736
+ }), results.source, resubStream$), takeUntil(teardown$));
1737
+ if (stale) refetch$.next(op);
1738
+ return stream$;
1069
1739
  }), filter(() => fetchPolicy === "cache-only" || fetchPolicy === "cache-and-network" || fetchPolicy === "cache-first")), pipe(merge(nonCache$, pipe(query$, filter((op) => {
1070
1740
  const { data } = cache.readQuery(op.artifact, op.variables);
1071
1741
  return fetchPolicy === "cache-and-network" || data === null;
@@ -1073,8 +1743,22 @@ const cacheExchange = (options = {}) => {
1073
1743
  if (result.operation.variant === "request" && result.operation.artifact.kind === "mutation" && result.operation.metadata?.cache?.optimisticResponse) cache.removeOptimistic(result.operation.key);
1074
1744
  if (result.operation.variant === "request" && result.data) cache.writeQuery(result.operation.artifact, result.operation.variables, result.data);
1075
1745
  if (result.operation.variant !== "request" || result.operation.artifact.kind !== "query" || fetchPolicy === "network-only" || !!(result.errors && result.errors.length > 0)) return fromValue(result);
1746
+ if (subscriptionHasData.get(result.operation.key)) {
1747
+ const { data } = cache.readQuery(result.operation.artifact, result.operation.variables);
1748
+ if (data !== null) return empty();
1749
+ return fromValue({
1750
+ operation: result.operation,
1751
+ data: void 0,
1752
+ errors: [new ExchangeError("Cache failed to denormalize the network response. This is likely a bug in the cache normalizer.", { exchangeName: "cache" })]
1753
+ });
1754
+ }
1755
+ subscriptionHasData.set(result.operation.key, true);
1756
+ resubscribe$.next(result.operation.key);
1076
1757
  const { data } = cache.readQuery(result.operation.artifact, result.operation.variables);
1077
- if (data !== null) return empty();
1758
+ if (data !== null) return fromValue({
1759
+ ...result,
1760
+ data
1761
+ });
1078
1762
  return fromValue({
1079
1763
  operation: result.operation,
1080
1764
  data: void 0,
@@ -1086,6 +1770,99 @@ const cacheExchange = (options = {}) => {
1086
1770
  };
1087
1771
  };
1088
1772
 
1773
+ //#endregion
1774
+ //#region src/cache/patch.ts
1775
/**
 * Creates a shallow copy of a container node.
 * @param node - An array or plain object to duplicate.
 * @returns A new array (for arrays) or a new object with the same own
 *   enumerable properties; nested values are shared, not cloned.
 */
const copyNode = (node) => {
  if (Array.isArray(node)) return node.slice();
  return Object.assign({}, node);
};
1776
/**
 * Copy-on-write along a single path: returns a new root where the root and
 * every intermediate container on `path` (all segments except the last) have
 * been shallow-copied, so the node addressed by `path` can be replaced
 * without mutating the original tree. All untouched branches stay shared.
 * @param root - The original tree (object or array).
 * @param path - Path segments; an empty path returns `root` unchanged.
 * @returns The new root, or `root` itself when `path` is empty.
 */
const shallowCopyPath = (root, path) => {
  if (path.length === 0) return root;
  // Local duplicate of the module's node copier keeps this helper self-contained.
  const dup = (node) => (Array.isArray(node) ? [...node] : { ...node });
  const top = dup(root);
  let cursor = top;
  for (let i = 0; i + 1 < path.length; i++) {
    const copied = dup(cursor[path[i]]);
    cursor[path[i]] = copied;
    cursor = copied;
  }
  return top;
};
1787
/**
 * Sets a value at a nested path within an object, mutating it in place.
 * Intermediate containers along the path are assumed to already exist.
 * @param obj - The object to modify.
 * @param path - The path to the target location; must be non-empty to have
 *   any effect (a root replacement cannot be expressed as a mutation).
 * @param value - The value to set.
 */
const setPath = (obj, path, value) => {
  // Guard: with an empty path, `path.at(-1)` is undefined and the original
  // code wrote to the literal "undefined" key, silently corrupting `obj`.
  if (path.length === 0) return;
  let current = obj;
  for (let i = 0; i < path.length - 1; i++) current = current[path[i]];
  current[path.at(-1)] = value;
};
1798
/**
 * Reads the value at a nested path within an object.
 * @param obj - The object to read from.
 * @param path - The path segments to follow.
 * @returns The value at the path; `undefined` when any node along the way is
 *   null or undefined; `obj` itself when the path is empty.
 */
const getPath = (obj, path) => {
  let node = obj;
  for (let i = 0; i < path.length; i++) {
    // `== null` matches both null and undefined before indexing.
    if (node == null) return void 0;
    node = node[path[i]];
  }
  return node;
};
1812
/**
 * Applies cache patches to data immutably: for each patch, only the
 * containers along the patched path are shallow-copied (via
 * `shallowCopyPath`); everything off-path stays referentially shared with
 * the input. Supported patch types: "set" (replace a value; an empty path
 * replaces the whole root), "splice" (array insert/remove), and "swap"
 * (exchange two array elements).
 * @param data - The original data tree; never mutated.
 * @param patches - Ordered patches to apply.
 * @returns The patched tree, or `data` itself when there are no patches.
 */
const applyPatchesImmutable = (data, patches) => {
  if (patches.length === 0) return data;

  // Walks to the parent of the node addressed by `path`.
  const parentOf = (root, path) => {
    let node = root;
    for (let i = 0; i < path.length - 1; i++) node = node[path[i]];
    return node;
  };
  // Walks to the node addressed by the full `path`.
  const nodeAt = (root, path) => {
    let node = root;
    for (const key of path) node = node[key];
    return node;
  };

  let current = data;
  for (const patch of patches) {
    if (patch.type === "set") {
      if (patch.path.length === 0) {
        // Root-level set replaces the whole tree.
        current = patch.value;
        continue;
      }
      current = shallowCopyPath(current, patch.path);
      parentOf(current, patch.path)[patch.path.at(-1)] = patch.value;
    } else if (patch.type === "splice") {
      current = shallowCopyPath(current, patch.path);
      const next = [...nodeAt(current, patch.path)];
      next.splice(patch.index, patch.deleteCount, ...patch.items);
      parentOf(current, patch.path)[patch.path.at(-1)] = next;
    } else if (patch.type === "swap") {
      current = shallowCopyPath(current, patch.path);
      const next = [...nodeAt(current, patch.path)];
      const held = next[patch.i];
      next[patch.i] = next[patch.j];
      next[patch.j] = held;
      parentOf(current, patch.path)[patch.path.at(-1)] = next;
    }
  }
  return current;
};
1848
/**
 * Applies cache patches to a mutable target object in place.
 * "set" writes through `setPath` (a root-level set cannot mutate in place,
 * so its value is returned instead); "splice" and "swap" mutate the array
 * located via `getPath` directly.
 * @param target - The mutable object to apply patches to.
 * @param patches - The patches to apply, in order.
 * @returns The new root value if a root-level "set" patch was applied,
 *   otherwise undefined.
 */
const applyPatchesMutable = (target, patches) => {
  let root;
  for (const patch of patches) {
    switch (patch.type) {
      case "set": {
        if (patch.path.length === 0) root = patch.value;
        else setPath(target, patch.path, patch.value);
        break;
      }
      case "splice": {
        const list = getPath(target, patch.path);
        list.splice(patch.index, patch.deleteCount, ...patch.items);
        break;
      }
      case "swap": {
        const list = getPath(target, patch.path);
        const held = list[patch.i];
        list[patch.i] = list[patch.j];
        list[patch.j] = held;
        break;
      }
    }
  }
  return root;
};
1865
+
1089
1866
  //#endregion
1090
1867
  //#region src/exchanges/retry.ts
1091
1868
  const defaultShouldRetry = (error) => isExchangeError(error, "http") && error.extensions?.statusCode !== void 0 && error.extensions.statusCode >= 500;
@@ -1557,4 +2334,4 @@ const createClient = (config) => {
1557
2334
  };
1558
2335
 
1559
2336
  //#endregion
1560
- export { AggregatedError, Client, ExchangeError, GraphQLError, RequiredFieldError, cacheExchange, createClient, dedupExchange, fragmentExchange, httpExchange, isAggregatedError, isExchangeError, isGraphQLError, requiredExchange, retryExchange, stringify, subscriptionExchange };
2337
+ export { AggregatedError, Client, ExchangeError, GraphQLError, RequiredFieldError, applyPatchesImmutable, applyPatchesMutable, cacheExchange, createClient, dedupExchange, fragmentExchange, getPath, httpExchange, isAggregatedError, isExchangeError, isGraphQLError, requiredExchange, retryExchange, setPath, stringify, subscriptionExchange };