@tanstack/db 0.4.8 → 0.4.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/dist/cjs/errors.cjs +51 -17
  2. package/dist/cjs/errors.cjs.map +1 -1
  3. package/dist/cjs/errors.d.cts +38 -8
  4. package/dist/cjs/index.cjs +8 -4
  5. package/dist/cjs/index.cjs.map +1 -1
  6. package/dist/cjs/query/builder/types.d.cts +1 -1
  7. package/dist/cjs/query/compiler/index.cjs +42 -19
  8. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  9. package/dist/cjs/query/compiler/index.d.cts +33 -8
  10. package/dist/cjs/query/compiler/joins.cjs +88 -66
  11. package/dist/cjs/query/compiler/joins.cjs.map +1 -1
  12. package/dist/cjs/query/compiler/joins.d.cts +5 -2
  13. package/dist/cjs/query/compiler/order-by.cjs +2 -0
  14. package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
  15. package/dist/cjs/query/compiler/order-by.d.cts +1 -0
  16. package/dist/cjs/query/compiler/select.cjs.map +1 -1
  17. package/dist/cjs/query/live/collection-config-builder.cjs +276 -42
  18. package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
  19. package/dist/cjs/query/live/collection-config-builder.d.cts +84 -8
  20. package/dist/cjs/query/live/collection-registry.cjs +16 -0
  21. package/dist/cjs/query/live/collection-registry.cjs.map +1 -0
  22. package/dist/cjs/query/live/collection-registry.d.cts +26 -0
  23. package/dist/cjs/query/live/collection-subscriber.cjs +57 -58
  24. package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
  25. package/dist/cjs/query/live/collection-subscriber.d.cts +4 -7
  26. package/dist/cjs/query/live-query-collection.cjs +11 -5
  27. package/dist/cjs/query/live-query-collection.cjs.map +1 -1
  28. package/dist/cjs/query/live-query-collection.d.cts +10 -3
  29. package/dist/cjs/query/optimizer.cjs +44 -7
  30. package/dist/cjs/query/optimizer.cjs.map +1 -1
  31. package/dist/cjs/query/optimizer.d.cts +4 -4
  32. package/dist/cjs/scheduler.cjs +137 -0
  33. package/dist/cjs/scheduler.cjs.map +1 -0
  34. package/dist/cjs/scheduler.d.cts +56 -0
  35. package/dist/cjs/transactions.cjs +7 -1
  36. package/dist/cjs/transactions.cjs.map +1 -1
  37. package/dist/esm/errors.d.ts +38 -8
  38. package/dist/esm/errors.js +52 -18
  39. package/dist/esm/errors.js.map +1 -1
  40. package/dist/esm/index.js +9 -5
  41. package/dist/esm/query/builder/types.d.ts +1 -1
  42. package/dist/esm/query/compiler/index.d.ts +33 -8
  43. package/dist/esm/query/compiler/index.js +42 -19
  44. package/dist/esm/query/compiler/index.js.map +1 -1
  45. package/dist/esm/query/compiler/joins.d.ts +5 -2
  46. package/dist/esm/query/compiler/joins.js +90 -68
  47. package/dist/esm/query/compiler/joins.js.map +1 -1
  48. package/dist/esm/query/compiler/order-by.d.ts +1 -0
  49. package/dist/esm/query/compiler/order-by.js +2 -0
  50. package/dist/esm/query/compiler/order-by.js.map +1 -1
  51. package/dist/esm/query/compiler/select.js.map +1 -1
  52. package/dist/esm/query/live/collection-config-builder.d.ts +84 -8
  53. package/dist/esm/query/live/collection-config-builder.js +276 -42
  54. package/dist/esm/query/live/collection-config-builder.js.map +1 -1
  55. package/dist/esm/query/live/collection-registry.d.ts +26 -0
  56. package/dist/esm/query/live/collection-registry.js +16 -0
  57. package/dist/esm/query/live/collection-registry.js.map +1 -0
  58. package/dist/esm/query/live/collection-subscriber.d.ts +4 -7
  59. package/dist/esm/query/live/collection-subscriber.js +57 -58
  60. package/dist/esm/query/live/collection-subscriber.js.map +1 -1
  61. package/dist/esm/query/live-query-collection.d.ts +10 -3
  62. package/dist/esm/query/live-query-collection.js +11 -5
  63. package/dist/esm/query/live-query-collection.js.map +1 -1
  64. package/dist/esm/query/optimizer.d.ts +4 -4
  65. package/dist/esm/query/optimizer.js +44 -7
  66. package/dist/esm/query/optimizer.js.map +1 -1
  67. package/dist/esm/scheduler.d.ts +56 -0
  68. package/dist/esm/scheduler.js +137 -0
  69. package/dist/esm/scheduler.js.map +1 -0
  70. package/dist/esm/transactions.js +7 -1
  71. package/dist/esm/transactions.js.map +1 -1
  72. package/package.json +2 -2
  73. package/src/errors.ts +79 -13
  74. package/src/query/builder/types.ts +1 -1
  75. package/src/query/compiler/index.ts +115 -32
  76. package/src/query/compiler/joins.ts +180 -127
  77. package/src/query/compiler/order-by.ts +7 -0
  78. package/src/query/compiler/select.ts +2 -3
  79. package/src/query/live/collection-config-builder.ts +450 -58
  80. package/src/query/live/collection-registry.ts +47 -0
  81. package/src/query/live/collection-subscriber.ts +88 -106
  82. package/src/query/live-query-collection.ts +39 -14
  83. package/src/query/optimizer.ts +85 -15
  84. package/src/scheduler.ts +198 -0
  85. package/src/transactions.ts +12 -1
@@ -0,0 +1,137 @@
+ "use strict";
+ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+ class Scheduler {
+   constructor() {
+     this.contexts = /* @__PURE__ */ new Map();
+     this.clearListeners = /* @__PURE__ */ new Set();
+   }
+   /**
+    * Get or create the state bucket for a context.
+    */
+   getOrCreateContext(contextId) {
+     let context = this.contexts.get(contextId);
+     if (!context) {
+       context = {
+         queue: [],
+         jobs: /* @__PURE__ */ new Map(),
+         dependencies: /* @__PURE__ */ new Map(),
+         completed: /* @__PURE__ */ new Set()
+       };
+       this.contexts.set(contextId, context);
+     }
+     return context;
+   }
+   /**
+    * Schedule work. Without a context id, executes immediately.
+    * Otherwise queues the job to be flushed once dependencies are satisfied.
+    * Scheduling the same jobId again replaces the previous run function.
+    */
+   schedule({ contextId, jobId, dependencies, run }) {
+     if (typeof contextId === `undefined`) {
+       run();
+       return;
+     }
+     const context = this.getOrCreateContext(contextId);
+     if (!context.jobs.has(jobId)) {
+       context.queue.push(jobId);
+     }
+     context.jobs.set(jobId, run);
+     if (dependencies) {
+       const depSet = new Set(dependencies);
+       depSet.delete(jobId);
+       context.dependencies.set(jobId, depSet);
+     } else if (!context.dependencies.has(jobId)) {
+       context.dependencies.set(jobId, /* @__PURE__ */ new Set());
+     }
+     context.completed.delete(jobId);
+   }
+   /**
+    * Flush all queued work for a context. Jobs with unmet dependencies are retried.
+    * Throws if a pass completes without running any job (dependency cycle).
+    */
+   flush(contextId) {
+     const context = this.contexts.get(contextId);
+     if (!context) return;
+     const { queue, jobs, dependencies, completed } = context;
+     while (queue.length > 0) {
+       let ranThisPass = false;
+       const jobsThisPass = queue.length;
+       for (let i = 0; i < jobsThisPass; i++) {
+         const jobId = queue.shift();
+         const run = jobs.get(jobId);
+         if (!run) {
+           dependencies.delete(jobId);
+           completed.delete(jobId);
+           continue;
+         }
+         const deps = dependencies.get(jobId);
+         let ready = !deps;
+         if (deps) {
+           ready = true;
+           for (const dep of deps) {
+             if (dep !== jobId && !completed.has(dep)) {
+               ready = false;
+               break;
+             }
+           }
+         }
+         if (ready) {
+           jobs.delete(jobId);
+           dependencies.delete(jobId);
+           run();
+           completed.add(jobId);
+           ranThisPass = true;
+         } else {
+           queue.push(jobId);
+         }
+       }
+       if (!ranThisPass) {
+         throw new Error(
+           `Scheduler detected unresolved dependencies for context ${String(
+             contextId
+           )}.`
+         );
+       }
+     }
+     this.contexts.delete(contextId);
+   }
+   /**
+    * Flush all contexts with pending work. Useful during tear-down.
+    */
+   flushAll() {
+     for (const contextId of Array.from(this.contexts.keys())) {
+       this.flush(contextId);
+     }
+   }
+   /** Clear all scheduled jobs for a context. */
+   clear(contextId) {
+     this.contexts.delete(contextId);
+     this.clearListeners.forEach((listener) => listener(contextId));
+   }
+   /** Register a listener to be notified when a context is cleared. */
+   onClear(listener) {
+     this.clearListeners.add(listener);
+     return () => this.clearListeners.delete(listener);
+   }
+   /** Check if a context has pending jobs. */
+   hasPendingJobs(contextId) {
+     const context = this.contexts.get(contextId);
+     return !!context && context.jobs.size > 0;
+   }
+   /** Remove a single job from a context and clean up its dependencies. */
+   clearJob(contextId, jobId) {
+     const context = this.contexts.get(contextId);
+     if (!context) return;
+     context.jobs.delete(jobId);
+     context.dependencies.delete(jobId);
+     context.completed.delete(jobId);
+     context.queue = context.queue.filter((id) => id !== jobId);
+     if (context.jobs.size === 0) {
+       this.contexts.delete(contextId);
+     }
+   }
+ }
+ const transactionScopedScheduler = new Scheduler();
+ exports.Scheduler = Scheduler;
+ exports.transactionScopedScheduler = transactionScopedScheduler;
+ //# sourceMappingURL=scheduler.cjs.map
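
Taken together, `flush` above repeatedly sweeps a context's queue, running every job whose dependencies have already completed and re-queuing the rest; if a full pass makes no progress it throws, surfacing dependency cycles instead of looping forever. A minimal sketch of that ordering, assuming the `Scheduler` class is importable (whether it is re-exported from the package entry point is not shown in this diff), with illustrative ids:

```ts
import { Scheduler } from "@tanstack/db" // assumed export path, for illustration only

const scheduler = new Scheduler()
const ctx = Symbol(`checkout`) // hypothetical context id

// "child" declares a dependency on "parent", so it is re-queued until "parent" has run.
scheduler.schedule({
  contextId: ctx,
  jobId: `child`,
  dependencies: [`parent`],
  run: () => console.log(`child`),
})
scheduler.schedule({
  contextId: ctx,
  jobId: `parent`,
  run: () => console.log(`parent`),
})

scheduler.flush(ctx) // logs "parent", then "child"; an unsatisfiable dependency would throw
```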
@@ -0,0 +1 @@
+ {"version":3,"file":"scheduler.cjs","sources":["../../src/scheduler.ts"],"sourcesContent":["/**\n * Identifier used to scope scheduled work. Maps to a transaction id for live queries.\n */\nexport type SchedulerContextId = string | symbol\n\n/**\n * Options for {@link Scheduler.schedule}. Jobs are identified by `jobId` within a context\n * and may declare dependencies.\n */\ninterface ScheduleOptions {\n contextId?: SchedulerContextId\n jobId: unknown\n dependencies?: Iterable<unknown>\n run: () => void\n}\n\n/**\n * State per context. Queue preserves order, jobs hold run functions, dependencies track\n * prerequisites, and completed records which jobs have run during the current flush.\n */\ninterface SchedulerContextState {\n queue: Array<unknown>\n jobs: Map<unknown, () => void>\n dependencies: Map<unknown, Set<unknown>>\n completed: Set<unknown>\n}\n\n/**\n * Scoped scheduler that coalesces work by context and job.\n *\n * - **context** (e.g. transaction id) defines the batching boundary; work is queued until flushed.\n * - **job id** deduplicates work within a context; scheduling the same job replaces the previous run function.\n * - Without a context id, work executes immediately.\n *\n * Callers manage their own state; the scheduler only orchestrates execution order.\n */\nexport class Scheduler {\n private contexts = new Map<SchedulerContextId, SchedulerContextState>()\n private clearListeners = new Set<(contextId: SchedulerContextId) => void>()\n\n /**\n * Get or create the state bucket for a context.\n */\n private getOrCreateContext(\n contextId: SchedulerContextId\n ): SchedulerContextState {\n let context = this.contexts.get(contextId)\n if (!context) {\n context = {\n queue: [],\n jobs: new Map(),\n dependencies: new Map(),\n completed: new Set(),\n }\n this.contexts.set(contextId, context)\n }\n return context\n }\n\n /**\n * Schedule work. Without a context id, executes immediately.\n * Otherwise queues the job to be flushed once dependencies are satisfied.\n * Scheduling the same jobId again replaces the previous run function.\n */\n schedule({ contextId, jobId, dependencies, run }: ScheduleOptions): void {\n if (typeof contextId === `undefined`) {\n run()\n return\n }\n\n const context = this.getOrCreateContext(contextId)\n\n // If this is a new job, add it to the queue\n if (!context.jobs.has(jobId)) {\n context.queue.push(jobId)\n }\n\n // Store or replace the run function\n context.jobs.set(jobId, run)\n\n // Update dependencies\n if (dependencies) {\n const depSet = new Set<unknown>(dependencies)\n depSet.delete(jobId)\n context.dependencies.set(jobId, depSet)\n } else if (!context.dependencies.has(jobId)) {\n context.dependencies.set(jobId, new Set())\n }\n\n // Clear completion status since we're rescheduling\n context.completed.delete(jobId)\n }\n\n /**\n * Flush all queued work for a context. 
Jobs with unmet dependencies are retried.\n * Throws if a pass completes without running any job (dependency cycle).\n */\n flush(contextId: SchedulerContextId): void {\n const context = this.contexts.get(contextId)\n if (!context) return\n\n const { queue, jobs, dependencies, completed } = context\n\n while (queue.length > 0) {\n let ranThisPass = false\n const jobsThisPass = queue.length\n\n for (let i = 0; i < jobsThisPass; i++) {\n const jobId = queue.shift()!\n const run = jobs.get(jobId)\n if (!run) {\n dependencies.delete(jobId)\n completed.delete(jobId)\n continue\n }\n\n const deps = dependencies.get(jobId)\n let ready = !deps\n if (deps) {\n ready = true\n for (const dep of deps) {\n if (dep !== jobId && !completed.has(dep)) {\n ready = false\n break\n }\n }\n }\n\n if (ready) {\n jobs.delete(jobId)\n dependencies.delete(jobId)\n // Run the job. If it throws, we don't mark it complete, allowing the\n // error to propagate while maintaining scheduler state consistency.\n run()\n completed.add(jobId)\n ranThisPass = true\n } else {\n queue.push(jobId)\n }\n }\n\n if (!ranThisPass) {\n throw new Error(\n `Scheduler detected unresolved dependencies for context ${String(\n contextId\n )}.`\n )\n }\n }\n\n this.contexts.delete(contextId)\n }\n\n /**\n * Flush all contexts with pending work. Useful during tear-down.\n */\n flushAll(): void {\n for (const contextId of Array.from(this.contexts.keys())) {\n this.flush(contextId)\n }\n }\n\n /** Clear all scheduled jobs for a context. */\n clear(contextId: SchedulerContextId): void {\n this.contexts.delete(contextId)\n // Notify listeners that this context was cleared\n this.clearListeners.forEach((listener) => listener(contextId))\n }\n\n /** Register a listener to be notified when a context is cleared. */\n onClear(listener: (contextId: SchedulerContextId) => void): () => void {\n this.clearListeners.add(listener)\n return () => this.clearListeners.delete(listener)\n }\n\n /** Check if a context has pending jobs. */\n hasPendingJobs(contextId: SchedulerContextId): boolean {\n const context = this.contexts.get(contextId)\n return !!context && context.jobs.size > 0\n }\n\n /** Remove a single job from a context and clean up its dependencies. 
*/\n clearJob(contextId: SchedulerContextId, jobId: unknown): void {\n const context = this.contexts.get(contextId)\n if (!context) return\n\n context.jobs.delete(jobId)\n context.dependencies.delete(jobId)\n context.completed.delete(jobId)\n context.queue = context.queue.filter((id) => id !== jobId)\n\n if (context.jobs.size === 0) {\n this.contexts.delete(contextId)\n }\n }\n}\n\nexport const transactionScopedScheduler = new Scheduler()\n"],"names":[],"mappings":";;AAoCO,MAAM,UAAU;AAAA,EAAhB,cAAA;AACL,SAAQ,+BAAe,IAAA;AACvB,SAAQ,qCAAqB,IAAA;AAAA,EAA6C;AAAA;AAAA;AAAA;AAAA,EAKlE,mBACN,WACuB;AACvB,QAAI,UAAU,KAAK,SAAS,IAAI,SAAS;AACzC,QAAI,CAAC,SAAS;AACZ,gBAAU;AAAA,QACR,OAAO,CAAA;AAAA,QACP,0BAAU,IAAA;AAAA,QACV,kCAAkB,IAAA;AAAA,QAClB,+BAAe,IAAA;AAAA,MAAI;AAErB,WAAK,SAAS,IAAI,WAAW,OAAO;AAAA,IACtC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS,EAAE,WAAW,OAAO,cAAc,OAA8B;AACvE,QAAI,OAAO,cAAc,aAAa;AACpC,UAAA;AACA;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,mBAAmB,SAAS;AAGjD,QAAI,CAAC,QAAQ,KAAK,IAAI,KAAK,GAAG;AAC5B,cAAQ,MAAM,KAAK,KAAK;AAAA,IAC1B;AAGA,YAAQ,KAAK,IAAI,OAAO,GAAG;AAG3B,QAAI,cAAc;AAChB,YAAM,SAAS,IAAI,IAAa,YAAY;AAC5C,aAAO,OAAO,KAAK;AACnB,cAAQ,aAAa,IAAI,OAAO,MAAM;AAAA,IACxC,WAAW,CAAC,QAAQ,aAAa,IAAI,KAAK,GAAG;AAC3C,cAAQ,aAAa,IAAI,OAAO,oBAAI,KAAK;AAAA,IAC3C;AAGA,YAAQ,UAAU,OAAO,KAAK;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAqC;AACzC,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,CAAC,QAAS;AAEd,UAAM,EAAE,OAAO,MAAM,cAAc,cAAc;AAEjD,WAAO,MAAM,SAAS,GAAG;AACvB,UAAI,cAAc;AAClB,YAAM,eAAe,MAAM;AAE3B,eAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,cAAM,QAAQ,MAAM,MAAA;AACpB,cAAM,MAAM,KAAK,IAAI,KAAK;AAC1B,YAAI,CAAC,KAAK;AACR,uBAAa,OAAO,KAAK;AACzB,oBAAU,OAAO,KAAK;AACtB;AAAA,QACF;AAEA,cAAM,OAAO,aAAa,IAAI,KAAK;AACnC,YAAI,QAAQ,CAAC;AACb,YAAI,MAAM;AACR,kBAAQ;AACR,qBAAW,OAAO,MAAM;AACtB,gBAAI,QAAQ,SAAS,CAAC,UAAU,IAAI,GAAG,GAAG;AACxC,sBAAQ;AACR;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,OAAO;AACT,eAAK,OAAO,KAAK;AACjB,uBAAa,OAAO,KAAK;AAGzB,cAAA;AACA,oBAAU,IAAI,KAAK;AACnB,wBAAc;AAAA,QAChB,OAAO;AACL,gBAAM,KAAK,KAAK;AAAA,QAClB;AAAA,MACF;AAEA,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR,0DAA0D;AAAA,YACxD;AAAA,UAAA,CACD;AAAA,QAAA;AAAA,MAEL;AAAA,IACF;AAEA,SAAK,SAAS,OAAO,SAAS;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAiB;AACf,eAAW,aAAa,MAAM,KAAK,KAAK,SAAS,KAAA,CAAM,GAAG;AACxD,WAAK,MAAM,SAAS;AAAA,IACtB;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WAAqC;AACzC,SAAK,SAAS,OAAO,SAAS;AAE9B,SAAK,eAAe,QAAQ,CAAC,aAAa,SAAS,SAAS,CAAC;AAAA,EAC/D;AAAA;AAAA,EAGA,QAAQ,UAA+D;AACrE,SAAK,eAAe,IAAI,QAAQ;AAChC,WAAO,MAAM,KAAK,eAAe,OAAO,QAAQ;AAAA,EAClD;AAAA;AAAA,EAGA,eAAe,WAAwC;AACrD,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,WAAO,CAAC,CAAC,WAAW,QAAQ,KAAK,OAAO;AAAA,EAC1C;AAAA;AAAA,EAGA,SAAS,WAA+B,OAAsB;AAC5D,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,CAAC,QAAS;AAEd,YAAQ,KAAK,OAAO,KAAK;AACzB,YAAQ,aAAa,OAAO,KAAK;AACjC,YAAQ,UAAU,OAAO,KAAK;AAC9B,YAAQ,QAAQ,QAAQ,MAAM,OAAO,CAAC,OAAO,OAAO,KAAK;AAEzD,QAAI,QAAQ,KAAK,SAAS,GAAG;AAC3B,WAAK,SAAS,OAAO,SAAS;AAAA,IAChC;AAAA,EACF;AACF;AAEO,MAAM,6BAA6B,IAAI,UAAA;;;"}
@@ -0,0 +1,56 @@
+ /**
+  * Identifier used to scope scheduled work. Maps to a transaction id for live queries.
+  */
+ export type SchedulerContextId = string | symbol;
+ /**
+  * Options for {@link Scheduler.schedule}. Jobs are identified by `jobId` within a context
+  * and may declare dependencies.
+  */
+ interface ScheduleOptions {
+     contextId?: SchedulerContextId;
+     jobId: unknown;
+     dependencies?: Iterable<unknown>;
+     run: () => void;
+ }
+ /**
+  * Scoped scheduler that coalesces work by context and job.
+  *
+  * - **context** (e.g. transaction id) defines the batching boundary; work is queued until flushed.
+  * - **job id** deduplicates work within a context; scheduling the same job replaces the previous run function.
+  * - Without a context id, work executes immediately.
+  *
+  * Callers manage their own state; the scheduler only orchestrates execution order.
+  */
+ export declare class Scheduler {
+     private contexts;
+     private clearListeners;
+     /**
+      * Get or create the state bucket for a context.
+      */
+     private getOrCreateContext;
+     /**
+      * Schedule work. Without a context id, executes immediately.
+      * Otherwise queues the job to be flushed once dependencies are satisfied.
+      * Scheduling the same jobId again replaces the previous run function.
+      */
+     schedule({ contextId, jobId, dependencies, run }: ScheduleOptions): void;
+     /**
+      * Flush all queued work for a context. Jobs with unmet dependencies are retried.
+      * Throws if a pass completes without running any job (dependency cycle).
+      */
+     flush(contextId: SchedulerContextId): void;
+     /**
+      * Flush all contexts with pending work. Useful during tear-down.
+      */
+     flushAll(): void;
+     /** Clear all scheduled jobs for a context. */
+     clear(contextId: SchedulerContextId): void;
+     /** Register a listener to be notified when a context is cleared. */
+     onClear(listener: (contextId: SchedulerContextId) => void): () => void;
+     /** Check if a context has pending jobs. */
+     hasPendingJobs(contextId: SchedulerContextId): boolean;
+     /** Remove a single job from a context and clean up its dependencies. */
+     clearJob(contextId: SchedulerContextId, jobId: unknown): void;
+ }
+ export declare const transactionScopedScheduler: Scheduler;
+ export {};
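
Per these declarations, omitting `contextId` runs the work immediately, while scheduling the same `jobId` twice within a context keeps a single queue entry and replaces its run function. A rough usage sketch (assuming a `Scheduler` instance as in the previous example; the string ids are invented):

```ts
const scheduler = new Scheduler()

// No contextId: executed synchronously, nothing is queued.
scheduler.schedule({ jobId: `log`, run: () => console.log(`immediate`) })

// Same jobId within a context: coalesced into one queue entry, latest run function wins.
scheduler.schedule({ contextId: `tx-1`, jobId: `recompute`, run: () => console.log(`stale`) })
scheduler.schedule({ contextId: `tx-1`, jobId: `recompute`, run: () => console.log(`latest`) })

scheduler.hasPendingJobs(`tx-1`) // true
scheduler.flush(`tx-1`)          // logs "latest" exactly once
scheduler.hasPendingJobs(`tx-1`) // false: the context is dropped after a successful flush
```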
@@ -2,6 +2,7 @@
  Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
  const deferred = require("./deferred.cjs");
  const errors = require("./errors.cjs");
+ const scheduler = require("./scheduler.cjs");
  const transactions = [];
  let transactionStack = [];
  let sequenceNumber = 0;
@@ -63,10 +64,15 @@ function getActiveTransaction() {
    }
  }
  function registerTransaction(tx) {
+   scheduler.transactionScopedScheduler.clear(tx.id);
    transactionStack.push(tx);
  }
  function unregisterTransaction(tx) {
-   transactionStack = transactionStack.filter((t) => t.id !== tx.id);
+   try {
+     scheduler.transactionScopedScheduler.flush(tx.id);
+   } finally {
+     transactionStack = transactionStack.filter((t) => t.id !== tx.id);
+   }
  }
  function removeFromPendingList(tx) {
    const index = transactions.findIndex((t) => t.id === tx.id);
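
This change ties the transaction lifecycle to the new scheduler: `registerTransaction` clears any stale work for the transaction id before it becomes the ambient transaction, and `unregisterTransaction` flushes that work inside a `try`/`finally`, so the ambient stack is cleaned up even if a flushed job throws. The practical effect is that work scheduled against a transaction id during `mutate` runs once, right after the callback returns. A sketch of that timing (treating `transactionScopedScheduler` as an internal import, shown only to illustrate when the flush happens; the job id is hypothetical):

```ts
import { createTransaction } from "@tanstack/db"
// transactionScopedScheduler is internal wiring; referenced here only to show flush timing.

const tx = createTransaction({
  autoCommit: false,
  mutationFn: async () => {},
})

tx.mutate(() => {
  // Queued against the ambient transaction's id; it does not run yet.
  transactionScopedScheduler.schedule({
    contextId: tx.id,
    jobId: `refresh-live-query`, // hypothetical job id
    run: () => console.log(`runs after the mutate callback returns`),
  })
  console.log(`inside the mutate callback`)
})
// unregisterTransaction flushed tx.id here, so the scheduled job has already run.
```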
@@ -1 +1 @@
- {"version":3,"file":"transactions.cjs","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport {\n MissingMutationFunctionError,\n TransactionAlreadyCompletedRollbackError,\n TransactionNotPendingCommitError,\n TransactionNotPendingMutateError,\n} from \"./errors\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Merges two pending mutations for the same item within a transaction\n *\n * Merge behavior truth table:\n * - (insert, update) → insert (merge changes, keep empty original)\n * - (insert, delete) → null (cancel both mutations)\n * - (update, delete) → delete (delete dominates)\n * - (update, update) → update (replace with latest, union changes)\n * - (delete, delete) → delete (replace with latest)\n * - (insert, insert) → insert (replace with latest)\n *\n * Note: (delete, update) and (delete, insert) should never occur as the collection\n * layer prevents operations on deleted items within the same transaction.\n *\n * @param existing - The existing mutation in the transaction\n * @param incoming - The new mutation being applied\n * @returns The merged mutation, or null if both should be removed\n */\nfunction mergePendingMutations<T extends object>(\n existing: PendingMutation<T>,\n incoming: PendingMutation<T>\n): PendingMutation<T> | null {\n // Truth table implementation\n switch (`${existing.type}-${incoming.type}` as const) {\n case `insert-update`: {\n // Update after insert: keep as insert but merge changes\n // For insert-update, the key should remain the same since collections don't allow key changes\n return {\n ...existing,\n type: `insert` as const,\n original: {},\n modified: incoming.modified,\n changes: { ...existing.changes, ...incoming.changes },\n // Keep existing keys (key changes not allowed in updates)\n key: existing.key,\n globalKey: existing.globalKey,\n // Merge metadata (last-write-wins)\n metadata: incoming.metadata ?? existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n // Update tracking info\n mutationId: incoming.mutationId,\n updatedAt: incoming.updatedAt,\n }\n }\n\n case `insert-delete`:\n // Delete after insert: cancel both mutations\n return null\n\n case `update-delete`:\n // Delete after update: delete dominates\n return incoming\n\n case `update-update`: {\n // Update after update: replace with latest, union changes\n return {\n ...incoming,\n // Keep original from first update\n original: existing.original,\n // Union the changes from both updates\n changes: { ...existing.changes, ...incoming.changes },\n // Merge metadata\n metadata: incoming.metadata ?? 
existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n }\n }\n\n case `delete-delete`:\n case `insert-insert`:\n // Same type: replace with latest\n return incoming\n\n default: {\n // Exhaustiveness check\n const _exhaustive: never = `${existing.type}-${incoming.type}` as never\n throw new Error(`Unhandled mutation combination: ${_exhaustive}`)\n }\n }\n}\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw new MissingMutationFunctionError()\n }\n this.id = config.id ?? 
crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingMutateError()\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit().catch(() => {\n // Errors from autoCommit are handled via isPersisted.promise\n // This catch prevents unhandled promise rejections\n })\n }\n\n return this\n }\n\n /**\n * Apply new mutations to this transaction, intelligently merging with existing mutations\n *\n * When mutations operate on the same item (same globalKey), they are merged according to\n * the following rules:\n *\n * - **insert + update** → insert (merge changes, keep empty original)\n * - **insert + delete** → removed (mutations cancel each other out)\n * - **update + delete** → delete (delete dominates)\n * - **update + update** → update (union changes, keep first original)\n * - **same type** → replace with latest\n *\n * This merging reduces over-the-wire churn and keeps the optimistic local view\n * aligned with user intent.\n *\n * @param mutations - Array of new mutations to apply\n */\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n const existingMutation = this.mutations[existingIndex]!\n const mergeResult = mergePendingMutations(existingMutation, newMutation)\n\n if (mergeResult === null) {\n // Remove the mutation (e.g., delete after insert cancels both)\n this.mutations.splice(existingIndex, 1)\n } else {\n // Replace with merged mutation\n this.mutations[existingIndex] = mergeResult\n }\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // 
Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw new TransactionAlreadyCompletedRollbackError()\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection._state.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection._state.pendingSyncedTransactions.length > 0) {\n mutation.collection._state.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingCommitError()\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n 
this.isPersisted.resolve(this)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Preserve the original error for rethrowing\n const originalError =\n error instanceof Error ? error : new Error(String(error))\n\n // Update transaction with error information\n this.error = {\n message: originalError.message,\n error: originalError,\n }\n\n // rollback the transaction\n this.rollback()\n\n // Re-throw the original error to preserve identity and stack\n throw originalError\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction }\n"],"names":["MissingMutationFunctionError","createDeferred","TransactionNotPendingMutateError","TransactionAlreadyCompletedRollbackError","TransactionNotPendingCommitError"],"mappings":";;;;AAgBA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAoBrB,SAAS,sBACP,UACA,UAC2B;AAE3B,UAAQ,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI,IAAA;AAAA,IACvC,KAAK,iBAAiB;AAGpB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,MAAM;AAAA,QACN,UAAU,CAAA;AAAA,QACV,UAAU,SAAS;AAAA,QACnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,KAAK,SAAS;AAAA,QACd,WAAW,SAAS;AAAA;AAAA,QAEpB,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA;AAAA,QAEtD,YAAY,SAAS;AAAA,QACrB,WAAW,SAAS;AAAA,MAAA;AAAA,IAExB;AAAA,IAEA,KAAK;AAEH,aAAO;AAAA,IAET,KAAK;AAEH,aAAO;AAAA,IAET,KAAK,iBAAiB;AAEpB,aAAO;AAAA,QACL,GAAG;AAAA;AAAA,QAEH,UAAU,SAAS;AAAA;AAAA,QAEnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA,MAAa;AAAA,IAEvE;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAEH,aAAO;AAAA,IAET,SAAS;AAEP,YAAM,cAAqB,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI;AAC5D,YAAM,IAAI,MAAM,mCAAmC,WAAW,EAAE;AAAA,IAClE;AAAA,EAAA;AAEJ;AAsDO,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AACjD,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AACnD,qBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAClE;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM,IAAIA,OAAAA,6BAAA;AAAA,IACZ;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAcC,wBAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,
QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,wBAAoB,IAAI;AACxB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,SAAS,MAAM,MAAM;AAAA,MAG1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AACtB,cAAM,mBAAmB,KAAK,UAAU,aAAa;AACrD,cAAM,cAAc,sBAAsB,kBAAkB,WAAW;AAEvE,YAAI,gBAAgB,MAAM;AAExB,eAAK,UAAU,OAAO,eAAe,CAAC;AAAA,QACxC,OAAO;AAEL,eAAK,UAAU,aAAa,IAAI;AAAA,QAClC;AAAA,MACF,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;AACnE,UAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM,IAAIC,OAAAA,yCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,OAAO,KAAK,OAAO,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,OAAO,yBAAA;AAG3B,YAAI,SAAS,WAAW,OAAO,0BAA0B,SAAS,GAAG;AACnE,mBAAS,WAAW,OAAO,0BAAA;AAAA,QAC7B;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AACzB,WAAK,YAAY,QAAQ,IAAI;AAE7B,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,YAAM,gBACJ,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAG1D,WAAK,QAAQ;AAAA,QACX,SAAS,cAAc;AAAA,QACvB,OAAO;AAAA,MAAA;AAIT,WAAK,SAAA;AAGL,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;;;"}
+ {"version":3,"file":"transactions.cjs","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport {\n MissingMutationFunctionError,\n TransactionAlreadyCompletedRollbackError,\n TransactionNotPendingCommitError,\n TransactionNotPendingMutateError,\n} from \"./errors\"\nimport { transactionScopedScheduler } from \"./scheduler.js\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Merges two pending mutations for the same item within a transaction\n *\n * Merge behavior truth table:\n * - (insert, update) → insert (merge changes, keep empty original)\n * - (insert, delete) → null (cancel both mutations)\n * - (update, delete) → delete (delete dominates)\n * - (update, update) → update (replace with latest, union changes)\n * - (delete, delete) → delete (replace with latest)\n * - (insert, insert) → insert (replace with latest)\n *\n * Note: (delete, update) and (delete, insert) should never occur as the collection\n * layer prevents operations on deleted items within the same transaction.\n *\n * @param existing - The existing mutation in the transaction\n * @param incoming - The new mutation being applied\n * @returns The merged mutation, or null if both should be removed\n */\nfunction mergePendingMutations<T extends object>(\n existing: PendingMutation<T>,\n incoming: PendingMutation<T>\n): PendingMutation<T> | null {\n // Truth table implementation\n switch (`${existing.type}-${incoming.type}` as const) {\n case `insert-update`: {\n // Update after insert: keep as insert but merge changes\n // For insert-update, the key should remain the same since collections don't allow key changes\n return {\n ...existing,\n type: `insert` as const,\n original: {},\n modified: incoming.modified,\n changes: { ...existing.changes, ...incoming.changes },\n // Keep existing keys (key changes not allowed in updates)\n key: existing.key,\n globalKey: existing.globalKey,\n // Merge metadata (last-write-wins)\n metadata: incoming.metadata ?? existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n // Update tracking info\n mutationId: incoming.mutationId,\n updatedAt: incoming.updatedAt,\n }\n }\n\n case `insert-delete`:\n // Delete after insert: cancel both mutations\n return null\n\n case `update-delete`:\n // Delete after update: delete dominates\n return incoming\n\n case `update-update`: {\n // Update after update: replace with latest, union changes\n return {\n ...incoming,\n // Keep original from first update\n original: existing.original,\n // Union the changes from both updates\n changes: { ...existing.changes, ...incoming.changes },\n // Merge metadata\n metadata: incoming.metadata ?? 
existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n }\n }\n\n case `delete-delete`:\n case `insert-insert`:\n // Same type: replace with latest\n return incoming\n\n default: {\n // Exhaustiveness check\n const _exhaustive: never = `${existing.type}-${incoming.type}` as never\n throw new Error(`Unhandled mutation combination: ${_exhaustive}`)\n }\n }\n}\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n // Clear any stale work that may have been left behind if a previous mutate\n // scope aborted before we could flush.\n transactionScopedScheduler.clear(tx.id)\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n // Always flush pending work for this transaction before removing it from\n // the ambient stack – this runs even if the mutate callback throws.\n // If flush throws (e.g., due to a job error), we still clean up the stack.\n try {\n transactionScopedScheduler.flush(tx.id)\n } finally {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n }\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: 
Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw new MissingMutationFunctionError()\n }\n this.id = config.id ?? crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingMutateError()\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit().catch(() => {\n // Errors from autoCommit are handled via isPersisted.promise\n // This catch prevents unhandled promise rejections\n })\n }\n\n return this\n }\n\n /**\n * Apply new mutations to this transaction, intelligently merging with existing mutations\n *\n * When mutations operate on the same item (same globalKey), they are merged according to\n * the following rules:\n *\n * - **insert + update** → insert (merge changes, keep empty original)\n * - **insert + delete** → removed (mutations cancel each other out)\n * - **update + delete** → delete (delete dominates)\n * - **update + update** → update (union changes, keep first original)\n * - **same type** → replace with latest\n *\n * This merging reduces over-the-wire churn and keeps the optimistic local view\n * aligned with user intent.\n *\n * @param mutations - Array of new mutations to apply\n */\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n const existingMutation = this.mutations[existingIndex]!\n const mergeResult = mergePendingMutations(existingMutation, newMutation)\n\n if (mergeResult === null) {\n // Remove the mutation (e.g., delete after 
insert cancels both)\n this.mutations.splice(existingIndex, 1)\n } else {\n // Replace with merged mutation\n this.mutations[existingIndex] = mergeResult\n }\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw new TransactionAlreadyCompletedRollbackError()\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection._state.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection._state.pendingSyncedTransactions.length > 0) {\n mutation.collection._state.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit 
failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingCommitError()\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n this.isPersisted.resolve(this)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Preserve the original error for rethrowing\n const originalError =\n error instanceof Error ? error : new Error(String(error))\n\n // Update transaction with error information\n this.error = {\n message: originalError.message,\n error: originalError,\n }\n\n // rollback the transaction\n this.rollback()\n\n // Re-throw the original error to preserve identity and stack\n throw originalError\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction 
}\n"],"names":["transactionScopedScheduler","MissingMutationFunctionError","createDeferred","TransactionNotPendingMutateError","TransactionAlreadyCompletedRollbackError","TransactionNotPendingCommitError"],"mappings":";;;;;AAiBA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAoBrB,SAAS,sBACP,UACA,UAC2B;AAE3B,UAAQ,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI,IAAA;AAAA,IACvC,KAAK,iBAAiB;AAGpB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,MAAM;AAAA,QACN,UAAU,CAAA;AAAA,QACV,UAAU,SAAS;AAAA,QACnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,KAAK,SAAS;AAAA,QACd,WAAW,SAAS;AAAA;AAAA,QAEpB,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA;AAAA,QAEtD,YAAY,SAAS;AAAA,QACrB,WAAW,SAAS;AAAA,MAAA;AAAA,IAExB;AAAA,IAEA,KAAK;AAEH,aAAO;AAAA,IAET,KAAK;AAEH,aAAO;AAAA,IAET,KAAK,iBAAiB;AAEpB,aAAO;AAAA,QACL,GAAG;AAAA;AAAA,QAEH,UAAU,SAAS;AAAA;AAAA,QAEnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA,MAAa;AAAA,IAEvE;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAEH,aAAO;AAAA,IAET,SAAS;AAEP,YAAM,cAAqB,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI;AAC5D,YAAM,IAAI,MAAM,mCAAmC,WAAW,EAAE;AAAA,IAClE;AAAA,EAAA;AAEJ;AAsDO,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AAGjDA,uCAA2B,MAAM,GAAG,EAAE;AACtC,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AAInD,MAAI;AACFA,yCAA2B,MAAM,GAAG,EAAE;AAAA,EACxC,UAAA;AACE,uBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAAA,EAClE;AACF;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM,IAAIC,OAAAA,6BAAA;AAAA,IACZ;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAcC,wBAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,wBAAoB,IAAI;AACxB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,SAAS,MAAM,MAAM;AAAA,MAG1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AACtB,cAAM,mBAAmB,KAAK,UAAU,aAAa;AACrD,cAAM,cAAc,sBAAsB,kBAAkB,WAAW;AAEvE,YAAI,gBAAgB,MAAM;AAExB,eAAK,UAAU,OAAO,eAAe,CAAC;AAAA,QACxC,OAAO;AAEL,eAAK,UAAU,aAAa,IAAI;AAAA,QAClC;AAAA,MACF,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;AACnE,UAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM,IAAIC,OAAAA,yCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D
,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,OAAO,KAAK,OAAO,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,OAAO,yBAAA;AAG3B,YAAI,SAAS,WAAW,OAAO,0BAA0B,SAAS,GAAG;AACnE,mBAAS,WAAW,OAAO,0BAAA;AAAA,QAC7B;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AACzB,WAAK,YAAY,QAAQ,IAAI;AAE7B,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,YAAM,gBACJ,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAG1D,WAAK,QAAQ;AAAA,QACX,SAAS,cAAc;AAAA,QACvB,OAAO;AAAA,MAAA;AAIT,WAAK,SAAA;AAGL,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;;;"}
@@ -144,8 +144,12 @@ export declare class HavingRequiresGroupByError extends QueryCompilationError {
  export declare class LimitOffsetRequireOrderByError extends QueryCompilationError {
      constructor();
  }
+ /**
+  * Error thrown when a collection input stream is not found during query compilation.
+  * In self-joins, each alias (e.g., 'employee', 'manager') requires its own input stream.
+  */
  export declare class CollectionInputNotFoundError extends QueryCompilationError {
-     constructor(collectionId: string);
+     constructor(alias: string, collectionId?: string, availableKeys?: Array<string>);
  }
  export declare class UnsupportedFromTypeError extends QueryCompilationError {
      constructor(type: string);
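For context on the self-join note in the comment above, here is a rough sketch of the aliasing it refers to, written against TanStack DB's live-query builder. The collection, field names, and exact builder signature are illustrative assumptions, not taken from this diff.

// Hypothetical self-join: the same collection is referenced under two aliases,
// so the compiler needs a separate input stream for `employee` and for `manager`.
import { createLiveQueryCollection, eq } from "@tanstack/db"

declare const employeesCollection: any // stand-in for an existing collection instance

const employeesWithManagers = createLiveQueryCollection((q) =>
  q
    .from({ employee: employeesCollection })
    .join({ manager: employeesCollection }, ({ employee, manager }) =>
      eq(employee.managerId, manager.id)
    )
)

If either alias lacks its own input stream, compilation now raises the richer CollectionInputNotFoundError declared above.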
@@ -168,17 +172,17 @@ export declare class JoinError extends TanStackDBError {
  export declare class UnsupportedJoinTypeError extends JoinError {
      constructor(joinType: string);
  }
- export declare class InvalidJoinConditionSameTableError extends JoinError {
-     constructor(tableAlias: string);
+ export declare class InvalidJoinConditionSameSourceError extends JoinError {
+     constructor(sourceAlias: string);
  }
- export declare class InvalidJoinConditionTableMismatchError extends JoinError {
+ export declare class InvalidJoinConditionSourceMismatchError extends JoinError {
      constructor();
  }
- export declare class InvalidJoinConditionLeftTableError extends JoinError {
-     constructor(tableAlias: string);
+ export declare class InvalidJoinConditionLeftSourceError extends JoinError {
+     constructor(sourceAlias: string);
  }
- export declare class InvalidJoinConditionRightTableError extends JoinError {
-     constructor(tableAlias: string);
+ export declare class InvalidJoinConditionRightSourceError extends JoinError {
+     constructor(sourceAlias: string);
  }
  export declare class InvalidJoinCondition extends JoinError {
      constructor();
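Because the old Table-named classes are removed from the export list further down, consumer code that narrows on them by class needs the identifier update. A minimal illustration of the consumer-side change; the try/catch and the compileMyLiveQuery placeholder are hypothetical:

import { InvalidJoinConditionSameSourceError } from "@tanstack/db"

declare function compileMyLiveQuery(): void // hypothetical stand-in for building/compiling a live query

try {
  compileMyLiveQuery()
} catch (error) {
  // Previously this check used InvalidJoinConditionSameTableError.
  if (error instanceof InvalidJoinConditionSameSourceError) {
    console.warn(`join condition references the same source alias on both sides`)
  } else {
    throw error
  }
}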
@@ -234,3 +238,29 @@ export declare class QueryOptimizerError extends TanStackDBError {
  export declare class CannotCombineEmptyExpressionListError extends QueryOptimizerError {
      constructor();
  }
+ /**
+  * Internal error when the query optimizer fails to convert a WHERE clause to a collection filter.
+  */
+ export declare class WhereClauseConversionError extends QueryOptimizerError {
+     constructor(collectionId: string, alias: string);
+ }
+ /**
+  * Error when a subscription cannot be found during lazy join processing.
+  * For subqueries, aliases may be remapped (e.g., 'activeUser' → 'user').
+  */
+ export declare class SubscriptionNotFoundError extends QueryCompilationError {
+     constructor(resolvedAlias: string, originalAlias: string, collectionId: string, availableAliases: Array<string>);
+ }
+ /**
+  * Error thrown when aggregate expressions are used outside of a GROUP BY context.
+  */
+ export declare class AggregateNotSupportedError extends QueryCompilationError {
+     constructor();
+ }
+ /**
+  * Internal error when the compiler returns aliases that don't have corresponding input streams.
+  * This should never happen since all aliases come from user declarations.
+  */
+ export declare class MissingAliasInputsError extends QueryCompilationError {
+     constructor(missingAliases: Array<string>);
+ }
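The 'activeUser' → 'user' remapping mentioned in the SubscriptionNotFoundError comment shows up directly in the runtime message implemented later in this diff. For illustration only; the arguments are hypothetical and this error is normally raised internally by the join pipeline:

import { SubscriptionNotFoundError } from "@tanstack/db"

const err = new SubscriptionNotFoundError(`user`, `activeUser`, `users`, [`post`])

console.log(err.message)
// Internal error: subscription for alias 'user' (remapped from 'activeUser', collection 'users')
// is missing in join pipeline. Available aliases: post. This indicates a bug in alias tracking.
// (message shown wrapped here; it is emitted as a single line)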
@@ -279,8 +279,10 @@ class LimitOffsetRequireOrderByError extends QueryCompilationError {
      }
  }
  class CollectionInputNotFoundError extends QueryCompilationError {
-   constructor(collectionId) {
-     super(`Input for collection "${collectionId}" not found in inputs map`);
+   constructor(alias, collectionId, availableKeys) {
+     const details = collectionId ? `alias "${alias}" (collection "${collectionId}")` : `collection "${alias}"`;
+     const availableKeysMsg = availableKeys?.length ? `. Available keys: ${availableKeys.join(`, `)}` : ``;
+     super(`Input for ${details} not found in inputs map${availableKeysMsg}`);
      }
  }
  class UnsupportedFromTypeError extends QueryCompilationError {
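The new constructor builds its message from optional parts, so both branches of the ternaries above are reachable. A short illustration; the aliases and ids are hypothetical:

import { CollectionInputNotFoundError } from "@tanstack/db"

// With a collection id and the known input keys:
console.log(new CollectionInputNotFoundError(`manager`, `employees`, [`employee`]).message)
// Input for alias "manager" (collection "employees") not found in inputs map. Available keys: employee

// With only an alias, the message matches the 0.4.8 wording:
console.log(new CollectionInputNotFoundError(`users`).message)
// Input for collection "users" not found in inputs map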
@@ -319,29 +321,29 @@ class UnsupportedJoinTypeError extends JoinError {
      super(`Unsupported join type: ${joinType}`);
    }
  }
- class InvalidJoinConditionSameTableError extends JoinError {
-   constructor(tableAlias) {
+ class InvalidJoinConditionSameSourceError extends JoinError {
+   constructor(sourceAlias) {
      super(
-       `Invalid join condition: both expressions refer to the same table "${tableAlias}"`
+       `Invalid join condition: both expressions refer to the same source "${sourceAlias}"`
      );
    }
  }
- class InvalidJoinConditionTableMismatchError extends JoinError {
+ class InvalidJoinConditionSourceMismatchError extends JoinError {
    constructor() {
-     super(`Invalid join condition: expressions must reference table aliases`);
+     super(`Invalid join condition: expressions must reference source aliases`);
    }
  }
- class InvalidJoinConditionLeftTableError extends JoinError {
-   constructor(tableAlias) {
+ class InvalidJoinConditionLeftSourceError extends JoinError {
+   constructor(sourceAlias) {
      super(
-       `Invalid join condition: left expression refers to an unavailable table "${tableAlias}"`
+       `Invalid join condition: left expression refers to an unavailable source "${sourceAlias}"`
      );
    }
  }
- class InvalidJoinConditionRightTableError extends JoinError {
-   constructor(tableAlias) {
+ class InvalidJoinConditionRightSourceError extends JoinError {
+   constructor(sourceAlias) {
      super(
-       `Invalid join condition: right expression does not refer to the joined table "${tableAlias}"`
+       `Invalid join condition: right expression does not refer to the joined source "${sourceAlias}"`
      );
    }
  }
@@ -457,8 +459,37 @@ class CannotCombineEmptyExpressionListError extends QueryOptimizerError {
      super(`Cannot combine empty expression list`);
    }
  }
+ class WhereClauseConversionError extends QueryOptimizerError {
+   constructor(collectionId, alias) {
+     super(
+       `Failed to convert WHERE clause to collection filter for collection '${collectionId}' alias '${alias}'. This indicates a bug in the query optimization logic.`
+     );
+   }
+ }
+ class SubscriptionNotFoundError extends QueryCompilationError {
+   constructor(resolvedAlias, originalAlias, collectionId, availableAliases) {
+     super(
+       `Internal error: subscription for alias '${resolvedAlias}' (remapped from '${originalAlias}', collection '${collectionId}') is missing in join pipeline. Available aliases: ${availableAliases.join(`, `)}. This indicates a bug in alias tracking.`
+     );
+   }
+ }
+ class AggregateNotSupportedError extends QueryCompilationError {
+   constructor() {
+     super(
+       `Aggregate expressions are not supported in this context. Use GROUP BY clause for aggregates.`
+     );
+   }
+ }
+ class MissingAliasInputsError extends QueryCompilationError {
+   constructor(missingAliases) {
+     super(
+       `Internal error: compiler returned aliases without inputs: ${missingAliases.join(`, `)}. This indicates a bug in query compilation. Please report this issue.`
+     );
+   }
+ }
  export {
    AggregateFunctionNotInSelectError,
+   AggregateNotSupportedError,
    CannotCombineEmptyExpressionListError,
    CollectionConfigurationError,
    CollectionInErrorStateError,
@@ -477,10 +508,10 @@ export {
    HavingRequiresGroupByError,
    InvalidCollectionStatusTransitionError,
    InvalidJoinCondition,
-   InvalidJoinConditionLeftTableError,
-   InvalidJoinConditionRightTableError,
-   InvalidJoinConditionSameTableError,
-   InvalidJoinConditionTableMismatchError,
+   InvalidJoinConditionLeftSourceError,
+   InvalidJoinConditionRightSourceError,
+   InvalidJoinConditionSameSourceError,
+   InvalidJoinConditionSourceMismatchError,
    InvalidSchemaError,
    InvalidSourceError,
    InvalidStorageDataFormatError,
@@ -491,6 +522,7 @@ export {
    KeyUpdateNotAllowedError,
    LimitOffsetRequireOrderByError,
    LocalStorageCollectionError,
+   MissingAliasInputsError,
    MissingDeleteHandlerError,
    MissingHandlerError,
    MissingInsertHandlerError,
@@ -517,6 +549,7 @@ export {
    StorageError,
    StorageKeyRequiredError,
    SubQueryMustHaveFromClauseError,
+   SubscriptionNotFoundError,
    SyncCleanupError,
    SyncTransactionAlreadyCommittedError,
    SyncTransactionAlreadyCommittedWriteError,
@@ -533,6 +566,7 @@ export {
    UnsupportedFromTypeError,
    UnsupportedJoinSourceTypeError,
    UnsupportedJoinTypeError,
-   UpdateKeyNotFoundError
+   UpdateKeyNotFoundError,
+   WhereClauseConversionError
  };
  //# sourceMappingURL=errors.js.map
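All four new error classes are added to the export lists above, so downstream code can import and narrow on them. A hedged sketch of one way a consumer might distinguish them; the handling policy is illustrative, and the root import path assumes the package re-exports its errors module as in earlier releases:

import {
  AggregateNotSupportedError,
  MissingAliasInputsError,
  SubscriptionNotFoundError,
  WhereClauseConversionError,
} from "@tanstack/db"

// The classes whose messages start with "Internal error" (plus the optimizer conversion
// failure) signal library bugs rather than user mistakes, so surface them separately.
function isInternalQueryBug(error: unknown): boolean {
  return (
    error instanceof MissingAliasInputsError ||
    error instanceof SubscriptionNotFoundError ||
    error instanceof WhereClauseConversionError
  )
}

function explain(error: unknown): string {
  if (error instanceof AggregateNotSupportedError) {
    return `move the aggregate expression into a GROUP BY query`
  }
  return isInternalQueryBug(error) ? `likely a @tanstack/db bug - please report it` : `unhandled`
}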