dalila 1.4.2 → 1.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/context/auto-scope.d.ts +167 -0
- package/dist/context/auto-scope.js +381 -0
- package/dist/context/context.d.ts +111 -0
- package/dist/context/context.js +283 -0
- package/dist/context/index.d.ts +2 -0
- package/dist/context/index.js +2 -0
- package/dist/context/raw.d.ts +2 -0
- package/dist/context/raw.js +2 -0
- package/dist/core/dev.d.ts +7 -0
- package/dist/core/dev.js +14 -0
- package/dist/core/for.d.ts +42 -0
- package/dist/core/for.js +311 -0
- package/dist/core/index.d.ts +14 -0
- package/dist/core/index.js +14 -0
- package/dist/core/key.d.ts +33 -0
- package/dist/core/key.js +83 -0
- package/dist/core/match.d.ts +22 -0
- package/dist/core/match.js +175 -0
- package/dist/core/mutation.d.ts +55 -0
- package/dist/core/mutation.js +128 -0
- package/dist/core/persist.d.ts +63 -0
- package/dist/core/persist.js +371 -0
- package/dist/core/query.d.ts +72 -0
- package/dist/core/query.js +184 -0
- package/dist/core/resource.d.ts +299 -0
- package/dist/core/resource.js +924 -0
- package/dist/core/scheduler.d.ts +111 -0
- package/dist/core/scheduler.js +243 -0
- package/dist/core/scope.d.ts +74 -0
- package/dist/core/scope.js +171 -0
- package/dist/core/signal.d.ts +88 -0
- package/dist/core/signal.js +451 -0
- package/dist/core/store.d.ts +130 -0
- package/dist/core/store.js +234 -0
- package/dist/core/virtual.d.ts +26 -0
- package/dist/core/virtual.js +277 -0
- package/dist/core/watch-testing.d.ts +13 -0
- package/dist/core/watch-testing.js +16 -0
- package/dist/core/watch.d.ts +81 -0
- package/dist/core/watch.js +353 -0
- package/dist/core/when.d.ts +23 -0
- package/dist/core/when.js +124 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +4 -0
- package/dist/internal/watch-testing.d.ts +1 -0
- package/dist/internal/watch-testing.js +8 -0
- package/dist/router/index.d.ts +1 -0
- package/dist/router/index.js +1 -0
- package/dist/router/route.d.ts +23 -0
- package/dist/router/route.js +48 -0
- package/dist/router/router.d.ts +23 -0
- package/dist/router/router.js +169 -0
- package/dist/runtime/bind.d.ts +65 -0
- package/dist/runtime/bind.js +616 -0
- package/dist/runtime/index.d.ts +10 -0
- package/dist/runtime/index.js +9 -0
- package/dist/simple.d.ts +11 -0
- package/dist/simple.js +11 -0
- package/package.json +1 -1
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
/**
 * Low-level scheduler (runtime core).
 *
 * This module is intentionally independent from signals/effects to avoid circular
 * dependencies and to keep the runtime easy to reason about.
 *
 * What it provides:
 * - A RAF queue (`schedule`) for work that should be grouped per frame (DOM writes, batching flush)
 * - A microtask queue (`scheduleMicrotask`) for reactive follow-ups and short jobs
 * - A batching mechanism (`batch` + `queueInBatch`) to coalesce many notifications into one frame
 * - Optional DOM discipline helpers (`measure`/`mutate`) to document read/write intent
 *
 * Invariants:
 * - Tasks are executed in FIFO order within each queue.
 * - Microtasks are drained fully (up to a hard iteration limit) before returning control.
 * - RAF tasks are drained fully (up to a hard iteration limit) per frame.
 *
 * Safety:
 * - Iteration caps prevent infinite loops from growing queues unbounded.
 * - Flushes always drain with `splice(0)` to release references eagerly.
 */
/** A unit of schedulable work; the return value is ignored. */
type Task = () => void;
/**
 * Scheduler configuration.
 */
interface SchedulerConfig {
    /**
     * Maximum microtask iterations before stopping (prevents infinite loops).
     * A high value is intentional: reactive systems may legitimately schedule
     * multiple microtask waves. Default: 1000.
     */
    maxMicrotaskIterations: number;
    /**
     * Maximum RAF iterations before stopping (prevents infinite loops).
     * RAF work is typically heavier (DOM), so keep it lower. Default: 100.
     */
    maxRafIterations: number;
}
/**
 * Configure scheduler limits.
 *
 * Call this early in your app initialization if you need different limits.
 *
 * Example:
 * ```ts
 * configureScheduler({ maxMicrotaskIterations: 2000 });
 * ```
 */
export declare function configureScheduler(config: Partial<SchedulerConfig>): void;
/**
 * Get the current scheduler configuration.
 *
 * @returns a read-only snapshot; mutating it does not affect the scheduler.
 */
export declare function getSchedulerConfig(): Readonly<SchedulerConfig>;
/**
 * Schedule work for the next animation frame.
 *
 * Use this for DOM-affecting work you want grouped per frame.
 * (In Node tests, `requestAnimationFrame` is typically mocked.)
 */
export declare function schedule(task: Task): void;
/**
 * Schedule work in a microtask.
 *
 * Use this for reactive re-runs and short jobs where you want to:
 * - run after the current call stack,
 * - but before the next frame.
 */
export declare function scheduleMicrotask(task: Task): void;
/**
 * Returns true while inside a `batch()` call.
 *
 * Reactive primitives can use this to:
 * - update state immediately,
 * - but defer notification fan-out until the batch finishes.
 */
export declare function isBatching(): boolean;
/**
 * Enqueue work to run at the end of the *outermost* batch.
 *
 * Deduplication:
 * - The same Task identity will run at most once per batch flush.
 * - This is critical for effects/notifications: many signals can enqueue the same work.
 */
export declare function queueInBatch(task: Task): void;
/**
 * Batch multiple updates so their resulting notifications are grouped.
 *
 * Contract:
 * - State updates inside the batch happen immediately.
 * - Notifications/effects are deferred until the batch completes.
 * - Nested batches are supported: only the outermost batch triggers a flush.
 *
 * Flush strategy:
 * - We flush batched tasks in *one RAF* to group visible DOM work per frame.
 */
export declare function batch(fn: () => void): void;
/**
 * DOM read discipline helper.
 *
 * Currently a no-op wrapper. Its purpose is to make intent explicit:
 * put layout reads inside `measure()` so future tooling/optimizations can hook in.
 */
export declare function measure<T>(fn: () => T): T;
/**
 * DOM write discipline helper.
 *
 * Writes are scheduled in a microtask so they don't interleave with synchronous reads.
 * If you want stricter "writes only on RAF", you can swap this to `schedule(fn)`.
 */
export declare function mutate(fn: () => void): void;
// Empty export keeps this file a module so `Task`/`SchedulerConfig` stay file-local.
export {};
|
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
/**
 * Low-level scheduler (runtime core).
 *
 * This module is intentionally independent from signals/effects to avoid circular
 * dependencies and to keep the runtime easy to reason about.
 *
 * What it provides:
 * - A RAF queue (`schedule`) for work that should be grouped per frame (DOM writes, batching flush)
 * - A microtask queue (`scheduleMicrotask`) for reactive follow-ups and short jobs
 * - A batching mechanism (`batch` + `queueInBatch`) to coalesce many notifications into one frame
 * - Optional DOM discipline helpers (`measure`/`mutate`) to document read/write intent
 *
 * Invariants:
 * - Tasks are executed in FIFO order within each queue.
 * - Microtasks are drained fully (up to a hard iteration limit) before returning control.
 * - RAF tasks are drained fully (up to a hard iteration limit) per frame.
 *
 * Safety:
 * - Iteration caps prevent infinite loops from growing queues unbounded.
 * - Flushes always drain with `splice(0)` to release references eagerly.
 */
/** Mutable runtime limits; read by the flush loops on every drain. */
const schedulerConfig = {
    maxMicrotaskIterations: 1000,
    maxRafIterations: 100,
};
/**
 * Configure scheduler limits.
 *
 * Call this early in your app initialization if you need different limits.
 *
 * Example:
 * ```ts
 * configureScheduler({ maxMicrotaskIterations: 2000 });
 * ```
 */
export function configureScheduler(config) {
    // `Partial<SchedulerConfig>` permits properties to be explicitly `undefined`;
    // a bare `Object.assign(schedulerConfig, config)` would then clobber a valid
    // limit with `undefined` and break the flush-loop caps. Copy only real numbers.
    if (typeof config.maxMicrotaskIterations === 'number') {
        schedulerConfig.maxMicrotaskIterations = config.maxMicrotaskIterations;
    }
    if (typeof config.maxRafIterations === 'number') {
        schedulerConfig.maxRafIterations = config.maxRafIterations;
    }
}
/**
 * Get the current scheduler configuration.
 *
 * Returns a shallow copy so callers cannot mutate the live limits.
 */
export function getSchedulerConfig() {
    return { ...schedulerConfig };
}
|
|
45
|
+
/** True while a RAF flush is already booked for the pending frame. */
let rafScheduled = false;
/** True while a microtask drain is already booked. */
let microtaskScheduled = false;
/** Re-entrancy guards for the two flush loops. */
let isFlushingRaf = false;
let isFlushingMicrotasks = false;
/** FIFO queues. */
const rafQueue = [];
const microtaskQueue = [];
/**
 * RAF shim: use the host's requestAnimationFrame when available,
 * otherwise fall back to a zero-delay timeout (Node, workers without RAF).
 */
const hasNativeRaf = typeof globalThis !== 'undefined' &&
    typeof globalThis.requestAnimationFrame === 'function';
const rafImpl = hasNativeRaf
    ? (cb) => globalThis.requestAnimationFrame(() => cb())
    : (cb) => setTimeout(cb, 0);
/**
 * Batching state.
 *
 * During `batch()`, producers enqueue "notifications" via `queueInBatch()`.
 * When the outermost batch exits, we flush all queued tasks in a single RAF.
 */
let batchDepth = 0;
/** Batched tasks (FIFO) + identity dedupe set. */
const batchQueue = [];
const batchQueueSet = new Set();
|
|
65
|
+
/**
 * Schedule work for the next animation frame.
 *
 * Use this for DOM-affecting work you want grouped per frame.
 * (In Node tests, `requestAnimationFrame` is typically mocked.)
 */
export function schedule(task) {
    rafQueue.push(task);
    // One flush per frame: the first enqueue books it, later ones piggy-back.
    if (rafScheduled)
        return;
    rafScheduled = true;
    rafImpl(flushRaf);
}
|
|
78
|
+
/**
 * Schedule work in a microtask.
 *
 * Use this for reactive re-runs and short jobs where you want to:
 * - run after the current call stack,
 * - but before the next frame.
 */
export function scheduleMicrotask(task) {
    microtaskQueue.push(task);
    // A single drain handles the whole queue; only book it once.
    if (microtaskScheduled)
        return;
    microtaskScheduled = true;
    Promise.resolve().then(flushMicrotasks);
}
|
|
92
|
+
/**
 * Returns true while inside a `batch()` call.
 *
 * Reactive primitives can use this to:
 * - update state immediately,
 * - but defer notification fan-out until the batch finishes.
 */
export function isBatching() {
    // `batchDepth` counts nested `batch()` frames; any positive depth means a
    // batch is active somewhere up the call stack.
    return batchDepth > 0;
}
|
|
102
|
+
/**
 * Enqueue work to run at the end of the *outermost* batch.
 *
 * Deduplication:
 * - The same Task identity will run at most once per batch flush.
 * - This is critical for effects/notifications: many signals can enqueue the same work.
 */
export function queueInBatch(task) {
    if (!batchQueueSet.has(task)) {
        batchQueueSet.add(task);
        batchQueue.push(task);
    }
}
|
|
115
|
+
/**
 * Batch multiple updates so their resulting notifications are grouped.
 *
 * Contract:
 * - State updates inside the batch happen immediately.
 * - Notifications/effects are deferred until the batch completes.
 * - Nested batches are supported: only the outermost batch triggers a flush.
 *
 * Flush strategy:
 * - We flush batched tasks in *one RAF* to group visible DOM work per frame.
 */
export function batch(fn) {
    batchDepth += 1;
    try {
        fn();
    }
    finally {
        // Always unwind the depth, even if `fn` threw; only the outermost
        // frame kicks off the flush.
        batchDepth -= 1;
        if (batchDepth === 0) {
            flushBatch();
        }
    }
}
|
|
137
|
+
/**
 * Flush batched tasks by scheduling a single RAF job.
 *
 * This keeps "batch outputs" aligned to frames:
 * multiple signal sets -> one notification wave -> one render frame.
 */
function flushBatch() {
    if (batchQueue.length === 0)
        return;
    // Snapshot the queue and reset the dedupe set so tasks enqueued later
    // (even re-enqueues of the same identity) land in the next batch.
    batchQueueSet.clear();
    const pending = batchQueue.splice(0);
    schedule(() => {
        for (const job of pending) {
            job();
        }
    });
}
|
|
153
|
+
/**
 * Drain the microtask queue.
 *
 * Implementation notes:
 * - Uses `splice(0)` to drop references eagerly.
 * - Runs until the queue is empty, but caps the number of drain waves.
 *   (A single task can enqueue more microtasks; that still counts as another iteration.)
 * - Each task runs inside try/catch: because `splice(0)` has already removed
 *   the whole wave from the queue, one throwing task must not silently discard
 *   the rest of the wave — nor escape into `Promise.then` as an unhandled rejection.
 */
function flushMicrotasks() {
    if (isFlushingMicrotasks)
        return;
    isFlushingMicrotasks = true;
    let iterations = 0;
    const maxIterations = schedulerConfig.maxMicrotaskIterations;
    try {
        while (microtaskQueue.length > 0 && iterations < maxIterations) {
            iterations++;
            const tasks = microtaskQueue.splice(0);
            for (const t of tasks) {
                try {
                    t();
                }
                catch (err) {
                    // Report and keep draining; matches the library's
                    // console.error-and-continue convention elsewhere.
                    console.error('[Dalila] microtask task threw:', err);
                }
            }
        }
        if (iterations >= maxIterations && microtaskQueue.length > 0) {
            console.error(`[Dalila] Scheduler exceeded ${maxIterations} microtask iterations. ` +
                `Possible infinite loop detected. Remaining ${microtaskQueue.length} tasks discarded.`);
            microtaskQueue.length = 0;
        }
    }
    finally {
        isFlushingMicrotasks = false;
        microtaskScheduled = false;
        // If tasks were queued after we stopped flushing, reschedule a new
        // microtask turn. (No need to re-check `microtaskScheduled`: we just
        // cleared it above.)
        if (microtaskQueue.length > 0) {
            microtaskScheduled = true;
            Promise.resolve().then(flushMicrotasks);
        }
    }
}
|
|
190
|
+
/**
 * Drain the RAF queue.
 *
 * Implementation notes:
 * - RAF work is typically heavier (DOM), so we cap iterations more aggressively.
 * - Like microtasks, a task may enqueue more RAF tasks; that triggers another flush cycle.
 * - Each task runs inside try/catch: `splice(0)` has already removed the wave
 *   from the queue, so one throwing task must not silently discard the rest.
 */
function flushRaf() {
    if (isFlushingRaf)
        return;
    isFlushingRaf = true;
    let iterations = 0;
    const maxIterations = schedulerConfig.maxRafIterations;
    try {
        while (rafQueue.length > 0 && iterations < maxIterations) {
            iterations++;
            const tasks = rafQueue.splice(0);
            for (const t of tasks) {
                try {
                    t();
                }
                catch (err) {
                    // Report and keep draining; matches the library's
                    // console.error-and-continue convention elsewhere.
                    console.error('[Dalila] RAF task threw:', err);
                }
            }
        }
        if (iterations >= maxIterations && rafQueue.length > 0) {
            console.error(`[Dalila] Scheduler exceeded ${maxIterations} RAF iterations. ` +
                `Possible infinite loop detected. Remaining ${rafQueue.length} tasks discarded.`);
            rafQueue.length = 0;
        }
    }
    finally {
        isFlushingRaf = false;
        rafScheduled = false;
        // If tasks were queued during the flush, schedule another frame.
        // (No need to re-check `rafScheduled`: we just cleared it above.)
        if (rafQueue.length > 0) {
            rafScheduled = true;
            rafImpl(flushRaf);
        }
    }
}
|
|
226
|
+
/**
 * DOM read discipline helper.
 *
 * Currently a no-op wrapper. Its purpose is to make intent explicit:
 * put layout reads inside `measure()` so future tooling/optimizations can hook in.
 */
export function measure(fn) {
    const result = fn();
    return result;
}
|
|
235
|
+
/**
 * DOM write discipline helper.
 *
 * Writes are scheduled in a microtask so they don't interleave with synchronous reads.
 * If you want stricter "writes only on RAF", you can swap this to `schedule(fn)`.
 */
export function mutate(fn) {
    // Defer the write one microtask; `scheduleMicrotask` handles queueing/dedup
    // of the drain itself.
    scheduleMicrotask(fn);
}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
/**
 * A Scope is a simple lifecycle container.
 * Anything registered via `onCleanup` will run when `dispose()` is called.
 *
 * This is the foundation for "automatic cleanup" in Dalila:
 * - effects can be disposed when a view/controller scope ends
 * - event listeners / timers / abort controllers can be tied to scope lifetime
 */
export interface Scope {
    /** Register a cleanup callback to run when the scope is disposed. */
    onCleanup(fn: () => void): void;
    /** Dispose the scope and run all registered cleanups. */
    dispose(): void;
    /** Parent scope in the hierarchy (for context lookup). */
    readonly parent: Scope | null;
}
/**
 * Subscribe to scope creation events.
 * Returns an unsubscribe function.
 */
export declare function onScopeCreate(fn: (scope: Scope) => void): () => void;
/**
 * Subscribe to scope disposal events.
 * Returns an unsubscribe function.
 */
export declare function onScopeDispose(fn: (scope: Scope) => void): () => void;
/** Returns true if the given scope has been disposed. */
export declare function isScopeDisposed(scope: Scope): boolean;
/**
 * Creates a new Scope instance.
 *
 * Notes:
 * - Cleanups run in FIFO order (registration order).
 * - If a cleanup registers another cleanup during disposal, it will NOT run
 *   in the same dispose pass (because we snapshot via `splice(0)`).
 * - Parent is captured from the current scope context (set by withScope).
 *
 * @param parentOverride - omit to inherit the ambient scope; pass `null` for a root scope.
 */
export declare function createScope(parentOverride?: Scope | null): Scope;
/** Returns the current active scope (or null if none). */
export declare function getCurrentScope(): Scope | null;
/**
 * Returns the current scope hierarchy, from current scope up to the root.
 */
export declare function getCurrentScopeHierarchy(): Scope[];
/**
 * Sets the current active scope.
 * Prefer using `withScope()` unless you are implementing low-level internals.
 */
export declare function setCurrentScope(scope: Scope | null): void;
/**
 * Runs a function with the given scope set as current, then restores the previous scope.
 *
 * This enables scope-aware primitives:
 * - `signal()` can register cleanup in the current scope
 * - `effect()` can auto-dispose when the scope ends
 */
export declare function withScope<T>(scope: Scope, fn: () => T): T;
/**
 * Async version of withScope that properly maintains scope during await.
 *
 * IMPORTANT: Use this instead of withScope when fn is async, because
 * withScope() restores the previous scope immediately when the Promise
 * is returned, not when it resolves. This means anything created after
 * an await would not be in the scope.
 *
 * Example:
 * ```ts
 * await withScopeAsync(scope, async () => {
 *   await fetch(...);
 *   const sig = signal(0); // ← This will be in scope
 * });
 * ```
 */
export declare function withScopeAsync<T>(scope: Scope, fn: () => Promise<T>): Promise<T>;
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
/** Tracks disposed scopes without mutating the public interface. */
const disposedScopes = new WeakSet();
/** Global listeners fired on every scope creation / disposal. */
const scopeCreateListeners = new Set();
const scopeDisposeListeners = new Set();
/** Sentinel meaning "the cleanup completed without throwing". */
const NO_ERROR = Symbol('dalila.no-error');
/**
 * Subscribe to scope creation events.
 * Returns an unsubscribe function.
 */
export function onScopeCreate(fn) {
    scopeCreateListeners.add(fn);
    return () => scopeCreateListeners.delete(fn);
}
/**
 * Subscribe to scope disposal events.
 * Returns an unsubscribe function.
 */
export function onScopeDispose(fn) {
    scopeDisposeListeners.add(fn);
    return () => scopeDisposeListeners.delete(fn);
}
/** Returns true if the given scope has been disposed. */
export function isScopeDisposed(scope) {
    return disposedScopes.has(scope);
}
/**
 * Creates a new Scope instance.
 *
 * Notes:
 * - Cleanups run in FIFO order (registration order).
 * - If a cleanup registers another cleanup during disposal, it will NOT run
 *   in the same dispose pass (because we snapshot via `splice(0)`).
 * - Parent is captured from the current scope context (set by withScope);
 *   pass an explicit `null` to create a root scope.
 */
export function createScope(parentOverride) {
    const cleanups = [];
    // `undefined` means "inherit the ambient scope"; `null` or a Scope is taken as-is.
    const parent = parentOverride === undefined ? currentScope : parentOverride;
    // Returns NO_ERROR on success, otherwise the thrown value. A sentinel is
    // required (rather than returning the error directly) so that falsy thrown
    // values — `throw undefined`, `throw 0`, `throw ''` — are not mistaken for success.
    const runCleanupSafely = (fn) => {
        try {
            fn();
            return NO_ERROR;
        }
        catch (err) {
            return err;
        }
    };
    const scope = {
        onCleanup(fn) {
            // Registering after disposal runs the cleanup immediately so
            // resources are still released.
            if (isScopeDisposed(scope)) {
                const error = runCleanupSafely(fn);
                if (error !== NO_ERROR) {
                    console.error('[Dalila] cleanup registered after dispose() threw:', error);
                }
                return;
            }
            cleanups.push(fn);
        },
        dispose() {
            if (isScopeDisposed(scope))
                return;
            disposedScopes.add(scope);
            // Snapshot: cleanups added during disposal do not join this pass.
            const snapshot = cleanups.splice(0);
            const errors = [];
            for (const fn of snapshot) {
                const error = runCleanupSafely(fn);
                if (error !== NO_ERROR)
                    errors.push(error);
            }
            if (errors.length > 0) {
                console.error('[Dalila] scope.dispose() had cleanup errors:', errors);
            }
            for (const listener of scopeDisposeListeners) {
                try {
                    listener(scope);
                }
                catch (err) {
                    console.error('[Dalila] scope.dispose() listener threw:', err);
                }
            }
        },
        parent,
    };
    // A parent disposes its children via its own cleanup list.
    if (parent) {
        parent.onCleanup(() => scope.dispose());
    }
    for (const listener of scopeCreateListeners) {
        try {
            listener(scope);
        }
        catch (err) {
            console.error('[Dalila] scope.create() listener threw:', err);
        }
    }
    return scope;
}
/**
 * The currently active scope for the running code path.
 * This is set by `withScope()` and read by reactive primitives.
 */
let currentScope = null;
/** Returns the current active scope (or null if none). */
export function getCurrentScope() {
    return currentScope;
}
|
|
104
|
+
/**
 * Returns the current scope hierarchy, from current scope up to the root.
 */
export function getCurrentScopeHierarchy() {
    const chain = [];
    // Walk the parent links from the active scope outward.
    for (let node = currentScope; node; node = node.parent) {
        chain.push(node);
    }
    return chain;
}
|
|
116
|
+
/**
 * Sets the current active scope.
 * Prefer using `withScope()` unless you are implementing low-level internals:
 * this does not validate the scope and does not restore the previous one.
 */
export function setCurrentScope(scope) {
    currentScope = scope;
}
|
|
123
|
+
/**
 * Runs a function with the given scope set as current, then restores the previous scope.
 *
 * This enables scope-aware primitives:
 * - `signal()` can register cleanup in the current scope
 * - `effect()` can auto-dispose when the scope ends
 */
export function withScope(scope, fn) {
    if (isScopeDisposed(scope)) {
        throw new Error('[Dalila] withScope() cannot enter a disposed scope.');
    }
    const restoreTo = currentScope;
    currentScope = scope;
    try {
        return fn();
    }
    finally {
        // Restore even if `fn` threw, so the ambient scope never leaks.
        currentScope = restoreTo;
    }
}
|
|
143
|
+
/**
 * Async version of withScope that properly maintains scope during await.
 *
 * IMPORTANT: Use this instead of withScope when fn is async, because
 * withScope() restores the previous scope immediately when the Promise
 * is returned, not when it resolves. This means anything created after
 * an await would not be in the scope.
 *
 * Example:
 * ```ts
 * await withScopeAsync(scope, async () => {
 *   await fetch(...);
 *   const sig = signal(0); // ← This will be in scope
 * });
 * ```
 */
export async function withScopeAsync(scope, fn) {
    if (isScopeDisposed(scope)) {
        throw new Error('[Dalila] withScopeAsync() cannot enter a disposed scope.');
    }
    const restoreTo = currentScope;
    currentScope = scope;
    try {
        // `await` keeps us inside the try until `fn` settles, so the scope
        // stays active across the whole async body.
        return await fn();
    }
    finally {
        currentScope = restoreTo;
    }
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
/**
 * Register a global error handler for effects/computed invalidations.
 *
 * Use this to report errors without crashing the reactive graph.
 *
 * @param handler - receives the error and a short string identifying its source.
 */
export declare function setEffectErrorHandler(handler: (error: Error, source: string) => void): void;
/** A reactive, callable value container. */
export interface Signal<T> {
    /** Read the current value (with dependency tracking if inside an effect). */
    (): T;
    /** Set a new value and notify subscribers. */
    set(value: T): void;
    /** Update the value using a function. */
    update(fn: (v: T) => T): void;
    /** Read the current value without creating a dependency (no tracking). */
    peek(): T;
    /** Subscribe to value changes manually (outside of effects). Returns unsubscribe function. */
    on(callback: (value: T) => void): () => void;
}
/**
 * Create a signal: a mutable value with automatic dependency tracking.
 *
 * Reads:
 * - if there is an active effect, subscribe it (dynamic deps supported)
 *
 * Writes:
 * - update the value immediately
 * - notify subscribers (immediately, or deferred via batch queue)
 *
 * Lifecycle:
 * - effects remove themselves from subscriber sets on re-run and on dispose
 * - signals do not "own" subscriber lifetimes; they only maintain the set
 */
export declare function signal<T>(initialValue: T): Signal<T>;
/**
 * Create an effect: reruns `fn` whenever any tracked signal changes.
 *
 * Scheduling:
 * - the initial run is scheduled (microtask) to coalesce multiple writes
 *
 * Dependency tracking:
 * - before each run, the effect unsubscribes from previous dependencies
 * - during the run, reads resubscribe to the new dependencies (dynamic deps)
 *
 * Scope:
 * - if created inside a scope, the effect runs inside that scope
 * - the effect is disposed automatically when the scope disposes
 *
 * @returns a dispose function that stops the effect.
 */
export declare function effect(fn: () => void): () => void;
/**
 * Create a computed signal (derived, cached, read-only).
 *
 * Semantics:
 * - lazy: computes on first read
 * - cached: returns the cached value until invalidated
 * - synchronous invalidation: dependencies mark it dirty immediately
 *
 * Dependency tracking:
 * - while computing, we collect dependencies into an internal "markDirty" effect
 * - those dependencies will synchronously mark this computed as dirty on change
 *
 * Subscription:
 * - other effects can subscribe to the computed like a normal signal
 */
export declare function computed<T>(fn: () => T): Signal<T>;
/**
 * Async effect with cancellation.
 *
 * Semantics:
 * - provides an AbortSignal to the callback
 * - on re-run, aborts the previous run before starting the next
 * - when disposed, aborts the current run and stops future scheduling
 *
 * @returns a dispose function that aborts and stops the effect.
 */
export declare function effectAsync(fn: (signal: AbortSignal) => void): () => void;
/**
 * Run a function without tracking any signal reads as dependencies.
 *
 * Use this inside an effect when you want to read a signal's value
 * without creating a dependency on it.
 *
 * Example:
 * ```ts
 * effect(() => {
 *   const tracked = count(); // This read is tracked
 *   const untracked = untrack(() => other()); // This read is NOT tracked
 * });
 * ```
 */
export declare function untrack<T>(fn: () => T): T;
|