@rangojs/router 0.0.0-experimental.fa8a383a → 0.0.0-experimental.ffbe1b7f

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/dist/vite/index.js +17 -2
  2. package/package.json +1 -1
  3. package/skills/cache-guide/SKILL.md +32 -0
  4. package/skills/caching/SKILL.md +8 -0
  5. package/skills/loader/SKILL.md +52 -42
  6. package/skills/parallel/SKILL.md +67 -0
  7. package/skills/route/SKILL.md +31 -0
  8. package/skills/typesafety/SKILL.md +10 -0
  9. package/src/browser/partial-update.ts +11 -0
  10. package/src/browser/prefetch/queue.ts +61 -29
  11. package/src/browser/prefetch/resource-ready.ts +77 -0
  12. package/src/browser/react/NavigationProvider.tsx +5 -3
  13. package/src/cache/cache-runtime.ts +15 -11
  14. package/src/cache/cache-scope.ts +46 -5
  15. package/src/cache/taint.ts +55 -0
  16. package/src/context-var.ts +72 -2
  17. package/src/route-definition/helpers-types.ts +6 -5
  18. package/src/router/handler-context.ts +31 -8
  19. package/src/router/loader-resolution.ts +7 -1
  20. package/src/router/match-middleware/background-revalidation.ts +12 -1
  21. package/src/router/match-middleware/cache-lookup.ts +46 -6
  22. package/src/router/match-middleware/cache-store.ts +21 -4
  23. package/src/router/match-result.ts +11 -5
  24. package/src/router/metrics.ts +6 -1
  25. package/src/router/middleware-types.ts +6 -2
  26. package/src/router/middleware.ts +2 -2
  27. package/src/router/router-context.ts +1 -0
  28. package/src/router/segment-resolution/fresh.ts +37 -14
  29. package/src/router/segment-resolution/helpers.ts +29 -24
  30. package/src/router/segment-resolution/revalidation.ts +43 -19
  31. package/src/router/types.ts +1 -0
  32. package/src/router.ts +1 -0
  33. package/src/rsc/handler.ts +0 -9
  34. package/src/server/context.ts +12 -0
  35. package/src/server/request-context.ts +42 -8
  36. package/src/types/handler-context.ts +120 -22
  37. package/src/types/loader-types.ts +4 -4
@@ -1745,7 +1745,7 @@ import { resolve } from "node:path";
1745
1745
  // package.json
1746
1746
  var package_default = {
1747
1747
  name: "@rangojs/router",
1748
- version: "0.0.0-experimental.fa8a383a",
1748
+ version: "0.0.0-experimental.ffbe1b7f",
1749
1749
  description: "Django-inspired RSC router with composable URL patterns",
1750
1750
  keywords: [
1751
1751
  "react",
@@ -3274,8 +3274,17 @@ function jsonParseExpression(value) {
3274
3274
  }
3275
3275
 
3276
3276
  // src/context-var.ts
3277
+ var NON_CACHEABLE_KEYS = /* @__PURE__ */ Symbol.for(
3278
+ "rango:non-cacheable-keys"
3279
+ );
3280
+ function getNonCacheableKeys(variables) {
3281
+ if (!variables[NON_CACHEABLE_KEYS]) {
3282
+ variables[NON_CACHEABLE_KEYS] = /* @__PURE__ */ new Set();
3283
+ }
3284
+ return variables[NON_CACHEABLE_KEYS];
3285
+ }
3277
3286
  var FORBIDDEN_KEYS = /* @__PURE__ */ new Set(["__proto__", "constructor", "prototype"]);
3278
- function contextSet(variables, keyOrVar, value) {
3287
+ function contextSet(variables, keyOrVar, value, options) {
3279
3288
  if (typeof keyOrVar === "string") {
3280
3289
  if (FORBIDDEN_KEYS.has(keyOrVar)) {
3281
3290
  throw new Error(
@@ -3283,8 +3292,14 @@ function contextSet(variables, keyOrVar, value) {
3283
3292
  );
3284
3293
  }
3285
3294
  variables[keyOrVar] = value;
3295
+ if (options?.cache === false) {
3296
+ getNonCacheableKeys(variables).add(keyOrVar);
3297
+ }
3286
3298
  } else {
3287
3299
  variables[keyOrVar.key] = value;
3300
+ if (options?.cache === false) {
3301
+ getNonCacheableKeys(variables).add(keyOrVar.key);
3302
+ }
3288
3303
  }
3289
3304
  }
3290
3305
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@rangojs/router",
3
- "version": "0.0.0-experimental.fa8a383a",
3
+ "version": "0.0.0-experimental.ffbe1b7f",
4
4
  "description": "Django-inspired RSC router with composable URL patterns",
5
5
  "keywords": [
6
6
  "react",
@@ -162,6 +162,38 @@ middleware(async (ctx, next) => {
162
162
  });
163
163
  ```
164
164
 
165
+ ## Context Variable Cache Safety
166
+
167
+ Context variables created with `createVar()` are cacheable by default and can
168
+ be read freely inside `cache()` and `"use cache"` scopes. Non-cacheable vars
169
+ throw at read time to prevent request-specific data from being captured.
170
+
171
+ There are two ways to mark a value as non-cacheable:
172
+
173
+ ```typescript
174
+ // Var-level policy — inherently request-specific data
175
+ const Session = createVar<SessionData>({ cache: false });
176
+
177
+ // Write-level escalation — this specific write is non-cacheable
178
+ ctx.set(Theme, derivedTheme, { cache: false });
179
+ ```
180
+
181
+ "Least cacheable wins": if either the var definition or the `ctx.set()` call
182
+ specifies `cache: false`, the value is non-cacheable.
183
+
184
+ **Behavior inside cache scopes:**
185
+
186
+ | Operation | Inside `cache()` / `"use cache"` |
187
+ | ----------------------------------- | -------------------------------- |
188
+ | `ctx.get(cacheableVar)` | Allowed |
189
+ | `ctx.get(nonCacheableVar)` | Throws |
190
+ | `ctx.set(var, value)` (cacheable) | Allowed |
191
+ | `ctx.header()`, `ctx.cookie()`, etc | Throws (response side effects) |
192
+
193
+ Write is dumb — `ctx.set()` stores the cache metadata but does not enforce.
194
+ Enforcement happens at read time (`ctx.get()`), where ALS detects the cache
195
+ scope and rejects non-cacheable reads.
196
+
165
197
  ## Loaders Are Always Fresh
166
198
 
167
199
  Loaders are **never cached** by route-level `cache()`. Even on a full cache hit
@@ -173,6 +173,14 @@ const router = createRouter<AppBindings>({
173
173
  KV entries require `expirationTtl >= 60s`. Short-lived entries (< 60s total TTL)
174
174
  are only cached in L1.
175
175
 
176
+ ## Context Variables Inside Cache Boundaries
177
+
178
+ Context variables (`createVar`) are cacheable by default and can be read and
179
+ written inside `cache()` scopes. Variables marked with `{ cache: false }` (at
180
+ the var level or write level) throw when read inside a cache scope. Response
181
+ side effects (`ctx.header()`, `ctx.cookie()`) always throw inside cache
182
+ boundaries. See `/cache-guide` for the full cache safety table.
183
+
176
184
  ## Nested Cache Boundaries
177
185
 
178
186
  Override cache settings for specific sections:
@@ -65,24 +65,10 @@ export const urlpatterns = urls(({ path, loader }) => [
65
65
 
66
66
  ## Consuming Loader Data
67
67
 
68
- Loaders are the **live data layer** — they resolve fresh on every request.
69
- The way you consume them depends on whether you're in a server component
70
- (route handler) or a client component.
71
-
72
- > **IMPORTANT: Prefer consuming loaders in client components.** Keeping data
73
- > fetching in loaders and consumption in client components creates a clean
74
- > separation: the server-side handler renders static markup that can be
75
- > freely cached with `cache()`, while loader data stays fresh on every
76
- > request. When you consume loaders in server handlers via `ctx.use()`, the
77
- > handler output depends on the loader data, which means caching the handler
78
- > also caches the data — defeating the purpose of the live data layer.
79
-
80
- ### In Client Components (Preferred)
81
-
82
- Client components use `useLoader()` from `@rangojs/router/client`.
83
- The loader **must** be registered with `loader()` in the route's DSL
84
- segments so the framework knows to resolve it during SSR and stream
85
- the data to the client:
68
+ Register loaders with `loader()` in the DSL and consume them in client
69
+ components with `useLoader()`. This is the recommended pattern — it keeps
70
+ data fetching on the server and consumption on the client, with a clean
71
+ separation that works correctly with `cache()`.
86
72
 
87
73
  ```typescript
88
74
  "use client";
@@ -96,40 +82,60 @@ function ProductDetails() {
96
82
  ```
97
83
 
98
84
  ```typescript
99
- // Route definition — loader() registration required for client consumption
85
+ // Route definition — loader() registration required
100
86
  path("/product/:slug", ProductPage, { name: "product" }, () => [
101
- loader(ProductLoader), // Required for useLoader() in client components
87
+ loader(ProductLoader),
102
88
  ]);
103
89
  ```
104
90
 
105
- ### In Route Handlers (Server Components)
91
+ DSL loaders are the **live data layer** — they resolve fresh on every
92
+ request, even when the route is inside a `cache()` boundary. The router
93
+ excludes them from the segment cache at storage time and re-resolves them
94
+ on retrieval. This means `cache()` gives you cached UI + fresh data by
95
+ default.
106
96
 
107
- In server components, use `ctx.use(Loader)` directly in the route handler.
108
- This doesn't require `loader()` registration in the DSL — it works
109
- standalone. **However**, prefer client-side consumption when possible (see
110
- note above).
97
+ ### Cache safety
111
98
 
112
- ```typescript
113
- import { ProductLoader } from "./loaders/product";
99
+ DSL loaders can safely read `createVar({ cache: false })` variables
100
+ because they are always resolved fresh. The read guard is bypassed for
101
+ loader functions — they never produce stale data.
102
+
103
+ ### ctx.use(Loader) — escape hatch
104
+
105
+ For cases where you need loader data in the server handler itself (e.g.,
106
+ to set ctx variables or make routing decisions), use `ctx.use(Loader)`:
114
107
 
115
- // Route handler — server component
108
+ ```typescript
116
109
  path("/product/:slug", async (ctx) => {
117
110
  const { product } = await ctx.use(ProductLoader);
118
- return <h1>{product.name}</h1>;
119
- }, { name: "product" })
111
+ ctx.set(Product, product); // make available to children
112
+ return <ProductPage />;
113
+ }, { name: "product" }, () => [
114
+ loader(ProductLoader), // still register for client consumption
115
+ ])
120
116
  ```
121
117
 
122
- When you do register with `loader()` in the DSL, `ctx.use()` returns the
118
+ When you register with `loader()` in the DSL, `ctx.use()` returns the
123
119
  same memoized result — loaders never run twice per request.
124
120
 
121
+ **Limitations of ctx.use(Loader):**
122
+
123
+ - The handler output depends on the loader data. If the route is inside
124
+ `cache()`, the handler is cached with the loader result baked in —
125
+ defeating the live data guarantee.
126
+ - Non-cacheable variable reads (`createVar({ cache: false })`) inside the
127
+ handler still throw, even if the data came from a loader.
128
+ - Prefer DSL `loader()` + client `useLoader()` for data that depends on
129
+ non-cacheable context variables.
130
+
125
131
  **Never use `useLoader()` in server components** — it is a client-only API.
126
132
 
127
133
  ### Summary
128
134
 
129
- | Context | API | `loader()` DSL required? |
130
- | ---------------------------- | ------------------- | ------------------------ |
131
- | Client component (preferred) | `useLoader(Loader)` | **Yes** |
132
- | Route handler (server) | `ctx.use(Loader)` | No |
135
+ | Pattern | API | Cache-safe | Recommended |
136
+ | ---------------------- | ------------------- | ---------- | ----------- |
137
+ | DSL + client component | `useLoader(Loader)` | Yes | Yes |
138
+ | Handler escape hatch | `ctx.use(Loader)` | No | When needed |
133
139
 
134
140
  ## Loader Context
135
141
 
@@ -564,10 +570,9 @@ export const CartLoader = createLoader(async (ctx) => {
564
570
  return { cart };
565
571
  });
566
572
 
567
- // urls.tsx
573
+ // urls.tsx — register loaders in the DSL
568
574
  export const urlpatterns = urls(({ path, layout, loader, loading, cache, revalidate }) => [
569
575
  layout(<ShopLayout />, () => [
570
- // Shared cart loader for all shop routes
571
576
  loader(CartLoader, () => [
572
577
  revalidate(({ actionId }) => actionId?.includes("Cart") ?? false),
573
578
  ]),
@@ -579,17 +584,22 @@ export const urlpatterns = urls(({ path, layout, loader, loading, cache, revalid
579
584
  ]),
580
585
  ]);
581
586
 
582
- // pages/product.tsx — server component (route handler)
587
+ // components/ProductDetails.tsx — consume in client component
588
+ "use client";
589
+ import { useLoader } from "@rangojs/router/client";
583
590
  import { ProductLoader, CartLoader } from "./loaders/shop";
584
591
 
585
- async function ProductPage(ctx) {
586
- const { product } = await ctx.use(ProductLoader);
587
- const { cart } = await ctx.use(CartLoader);
592
+ function ProductDetails() {
593
+ const { data: { product } } = useLoader(ProductLoader);
594
+ const { data: { cart } } = useLoader(CartLoader);
588
595
 
589
596
  return (
590
597
  <div>
591
598
  <h1>{product.name}</h1>
592
- <AddToCartButton productId={product.id} inCart={cart?.items.includes(product.id)} />
599
+ <AddToCartButton
600
+ productId={product.id}
601
+ inCart={cart?.items.includes(product.id)}
602
+ />
593
603
  </div>
594
604
  );
595
605
  }
@@ -92,6 +92,73 @@ path("/dashboard/:id", (ctx) => {
92
92
  ])
93
93
  ```
94
94
 
95
+ ## Setting Handles (Meta, Breadcrumbs)
96
+
97
+ Parallel slot handlers can call `ctx.use(Meta)` or `ctx.use(Breadcrumbs)` to
98
+ push handle data. The data is associated with the **parent** layout or route
99
+ segment, not the parallel segment itself. This is because parallels execute
100
+ after their parent handler and inherit its segment scope.
101
+
102
+ This works well for document-level metadata — the handle data follows the
103
+ parent's lifecycle (appears when the parent is mounted, removed when it
104
+ unmounts).
105
+
106
+ ```typescript
107
+ parallel({
108
+ "@meta": (ctx) => {
109
+ const meta = ctx.use(Meta);
110
+ meta({ title: "Product Detail" });
111
+ meta({ name: "description", content: "..." });
112
+ return null; // UI-less slot, only sets metadata
113
+ },
114
+ "@sidebar": (ctx) => <Sidebar />,
115
+ })
116
+ ```
117
+
118
+ Multiple parallels on the same parent can each push handle data — they all
119
+ accumulate under the parent segment ID.
120
+
121
+ ### Pattern: `@meta` slot for per-route metadata overrides
122
+
123
+ A dedicated `@meta` parallel slot lets routes define metadata separately from
124
+ their handler logic. The layout sets defaults via a title template, and each
125
+ route overrides via its own `@meta` slot. Since child segments push after
126
+ parents and `collectMeta` uses last-wins deduplication, overrides work
127
+ naturally.
128
+
129
+ ```typescript
130
+ // Layout sets defaults
131
+ layout((ctx) => {
132
+ ctx.use(Meta)({ title: { template: "%s | Store", default: "Store" } });
133
+ return <StoreLayout />;
134
+ }, () => [
135
+ // Route with @meta override — decoupled from handler rendering
136
+ path("/:slug", ProductPage, { name: "product" }, () => [
137
+ parallel({
138
+ "@meta": async (ctx) => {
139
+ const product = await ctx.use(ProductLoader);
140
+ const meta = ctx.use(Meta);
141
+ meta({ title: product.name });
142
+ meta({ name: "description", content: product.description });
143
+ meta({
144
+ "script:ld+json": {
145
+ "@context": "https://schema.org",
146
+ "@type": "Product",
147
+ name: product.name,
148
+ description: product.description,
149
+ },
150
+ });
151
+ return null; // UI-less slot
152
+ },
153
+ }),
154
+ ]),
155
+ ])
156
+ ```
157
+
158
+ This keeps the route handler focused on rendering UI while metadata
159
+ (title, description, Open Graph, JSON-LD) lives in a composable slot that
160
+ can be added, removed, or swapped per route without touching the handler.
161
+
95
162
  ## Parallel Routes with Loaders
96
163
 
97
164
  Add loaders and loading states to parallel routes:
@@ -181,6 +181,37 @@ String keys still work (`ctx.set("key", value)` / `ctx.get("key")`), but
181
181
  Only route handlers and middleware can call `ctx.set()`. Layouts, parallels,
182
182
  and intercepts can only read via `ctx.get()`.
183
183
 
184
+ #### Non-cacheable context variables
185
+
186
+ Mark a var as non-cacheable when it holds inherently request-specific data
187
+ (sessions, auth tokens, per-request IDs). There are two ways:
188
+
189
+ ```typescript
190
+ // Var-level: every value written to this var is non-cacheable
191
+ const Session = createVar<SessionData>({ cache: false });
192
+
193
+ // Write-level: escalate a normally-cacheable var for this specific write
194
+ const Theme = createVar<string>();
195
+ ctx.set(Theme, userTheme, { cache: false });
196
+ ```
197
+
198
+ "Least cacheable wins" — if either the var definition or the write site says
199
+ `cache: false`, the value is non-cacheable.
200
+
201
+ Reading a non-cacheable var inside `cache()` or `"use cache"` throws at
202
+ runtime. This prevents request-specific data from leaking into cached output:
203
+
204
+ ```typescript
205
+ // This throws — Session is non-cacheable
206
+ async function CachedWidget(ctx) {
207
+ "use cache";
208
+ const session = ctx.get(Session); // Error: non-cacheable var read inside cache scope
209
+ return <Widget />;
210
+ }
211
+ ```
212
+
213
+ Cacheable vars (the default) can be read freely inside cache scopes.
214
+
184
215
  ### Revalidation Contracts for Handler Data
185
216
 
186
217
  Handler-first guarantees apply within a single full render pass. For partial
@@ -369,8 +369,18 @@ interface PaginationData {
369
369
  perPage: number;
370
370
  }
371
371
  export const Pagination = createVar<PaginationData>();
372
+
373
+ // Non-cacheable var — reading inside cache() or "use cache" throws at runtime
374
+ const Session = createVar<SessionData>({ cache: false });
372
375
  ```
373
376
 
377
+ `createVar` accepts an optional options object. The `cache` option (default
378
+ `true`) controls whether the var's values can be read inside cache scopes.
379
+ Write-level escalation is also supported: `ctx.set(Var, value, { cache: false })`
380
+ marks a specific write as non-cacheable even if the var itself is cacheable.
381
+ "Least cacheable wins" — if either says `cache: false`, the value throws on
382
+ read inside `cache()` or `"use cache"`.
383
+
374
384
  ### Producer (handler or middleware)
375
385
 
376
386
  ```typescript
@@ -259,6 +259,17 @@ export function createPartialUpdater(
259
259
  existingSegments,
260
260
  );
261
261
 
262
+ // Fix: tx.commit() cached the source page's handleData because
263
+ // eventController hasn't been updated yet. Overwrite with the
264
+ // correct cached handleData to prevent cache corruption on
265
+ // subsequent navigations to this same URL.
266
+ if (mode.targetCacheHandleData) {
267
+ store.updateCacheHandleData(
268
+ store.getHistoryKey(),
269
+ mode.targetCacheHandleData,
270
+ );
271
+ }
272
+
262
273
  // Include cachedHandleData in metadata so NavigationProvider can restore
263
274
  // breadcrumbs and other handle data from cache.
264
275
  // Remove `handles` from metadata to prevent NavigationProvider from
@@ -5,21 +5,19 @@
5
5
  * Hover prefetches bypass this queue — they fire directly for immediate response
6
6
  * to user intent.
7
7
  *
8
- * Draining is deferred to the next animation frame so prefetch network activity
9
- * never blocks paint. This applies to both the initial batch and subsequent
10
- * batches — every drain cycle yields to the browser first.
8
+ * Draining waits for an idle main-thread moment and for viewport images to
9
+ * finish loading, so prefetch fetch() calls never compete with critical
10
+ * resources for the browser's connection pool.
11
11
  *
12
12
  * When a navigation starts, queued prefetches are cancelled but executing ones
13
13
  * are left running. Navigation can reuse their in-flight responses via the
14
14
  * prefetch cache's inflight promise map, avoiding duplicate requests.
15
15
  */
16
16
 
17
- const MAX_CONCURRENT = 2;
17
+ import { wait, waitForIdle, waitForViewportImages } from "./resource-ready.js";
18
18
 
19
- const deferToNextPaint: (fn: () => void) => void =
20
- typeof requestAnimationFrame === "function"
21
- ? requestAnimationFrame
22
- : (fn) => setTimeout(fn, 0);
19
+ const MAX_CONCURRENT = 2;
20
+ const IMAGE_WAIT_TIMEOUT = 2000;
23
21
 
24
22
  let active = 0;
25
23
  const queue: Array<{
@@ -28,8 +26,9 @@ const queue: Array<{
28
26
  }> = [];
29
27
  const queued = new Set<string>();
30
28
  const executing = new Set<string>();
31
- let abortController: AbortController | null = null;
29
+ const abortControllers = new Map<string, AbortController>();
32
30
  let drainScheduled = false;
31
+ let drainGeneration = 0;
33
32
 
34
33
  function startExecution(
35
34
  key: string,
@@ -37,8 +36,10 @@ function startExecution(
37
36
  ): void {
38
37
  active++;
39
38
  executing.add(key);
40
- abortController ??= new AbortController();
41
- execute(abortController.signal).finally(() => {
39
+ const ac = new AbortController();
40
+ abortControllers.set(key, ac);
41
+ execute(ac.signal).finally(() => {
42
+ abortControllers.delete(key);
42
43
  // Only decrement if this key wasn't already cleared by cancelAllPrefetches.
43
44
  // Without this guard, cancelled tasks' .finally() would underflow active
44
45
  // below zero, breaking the MAX_CONCURRENT guarantee.
@@ -50,18 +51,32 @@ function startExecution(
50
51
  }
51
52
 
52
53
  /**
53
- * Schedule a drain on the next animation frame.
54
- * Coalesces multiple drain requests into a single rAF callback so
55
- * batch completion doesn't schedule redundant frames.
54
+ * Schedule a drain after the browser is idle and viewport images are loaded.
55
+ * Coalesces multiple drain requests into a single deferred callback so
56
+ * batch completion doesn't schedule redundant waits.
57
+ *
58
+ * The two-step wait ensures prefetch fetch() calls don't compete with
59
+ * images for the browser's connection pool:
60
+ * 1. waitForIdle — yield until the main thread has a quiet moment
61
+ * 2. waitForViewportImages OR 2s timeout — yield until visible images
62
+ * finish loading, but don't let slow/broken images block indefinitely
56
63
  */
57
64
  function scheduleDrain(): void {
58
65
  if (drainScheduled) return;
59
66
  if (active >= MAX_CONCURRENT || queue.length === 0) return;
60
67
  drainScheduled = true;
61
- deferToNextPaint(() => {
62
- drainScheduled = false;
63
- drain();
64
- });
68
+ const gen = drainGeneration;
69
+ waitForIdle()
70
+ .then(() =>
71
+ Promise.race([waitForViewportImages(), wait(IMAGE_WAIT_TIMEOUT)]),
72
+ )
73
+ .then(() => {
74
+ drainScheduled = false;
75
+ // Stale drain: a cancel/abort happened while we were waiting.
76
+ // A fresh scheduleDrain will be called by whatever enqueues next.
77
+ if (gen !== drainGeneration) return;
78
+ if (queue.length > 0) drain();
79
+ });
65
80
  }
66
81
 
67
82
  function drain(): void {
@@ -74,9 +89,10 @@ function drain(): void {
74
89
 
75
90
  /**
76
91
  * Enqueue a prefetch for concurrency-limited execution.
77
- * Execution is always deferred to the next animation frame to avoid
78
- * blocking paint, even when below the concurrency limit.
79
- * Deduplicates by key — items already queued or executing are skipped.
92
+ * Execution is deferred until the browser is idle and viewport images
93
+ * have finished loading, so prefetches never compete with critical
94
+ * resources. Deduplicates by key — items already queued or executing
95
+ * are skipped.
80
96
  *
81
97
  * The executor receives an AbortSignal that is aborted when
82
98
  * cancelAllPrefetches() is called (e.g. on navigation start).
@@ -93,19 +109,32 @@ export function enqueuePrefetch(
93
109
  }
94
110
 
95
111
  /**
96
- * Cancel queued prefetches. Executing prefetches are left running so
97
- * navigation can reuse their in-flight responses (checked via
98
- * consumeInflightPrefetch in the prefetch cache). With MAX_CONCURRENT=2
99
- * and priority: "low", in-flight prefetches don't meaningfully compete
100
- * with navigation fetches under HTTP/2 multiplexing.
112
+ * Cancel queued prefetches and abort in-flight ones that don't match
113
+ * the current navigation target. If `keepUrl` is provided, the
114
+ * executing prefetch whose key contains that URL is kept alive so
115
+ * navigation can reuse its response via consumeInflightPrefetch.
101
116
  *
102
117
  * Called when a navigation starts via the NavigationProvider's
103
118
  * event controller subscription.
104
119
  */
105
- export function cancelAllPrefetches(): void {
120
+ export function cancelAllPrefetches(keepUrl?: string | null): void {
106
121
  queue.length = 0;
107
122
  queued.clear();
108
123
  drainScheduled = false;
124
+ drainGeneration++;
125
+
126
+ // Abort in-flight prefetches that aren't for the navigation target.
127
+ // Keys use format "sourceHref\0targetPathname+search" — match the
128
+ // target portion (after \0) against keepUrl.
129
+ for (const [key, ac] of abortControllers) {
130
+ const target = key.split("\0")[1];
131
+ if (keepUrl && target && keepUrl.startsWith(target)) continue;
132
+ ac.abort();
133
+ abortControllers.delete(key);
134
+ if (executing.delete(key)) {
135
+ active--;
136
+ }
137
+ }
109
138
  }
110
139
 
111
140
  /**
@@ -114,8 +143,10 @@ export function cancelAllPrefetches(): void {
114
143
  * in-flight responses would be stale.
115
144
  */
116
145
  export function abortAllPrefetches(): void {
117
- abortController?.abort();
118
- abortController = null;
146
+ for (const ac of abortControllers.values()) {
147
+ ac.abort();
148
+ }
149
+ abortControllers.clear();
119
150
 
120
151
  queue.length = 0;
121
152
  queued.clear();
@@ -125,4 +156,5 @@ export function abortAllPrefetches(): void {
125
156
  executing.clear();
126
157
  active = 0;
127
158
  drainScheduled = false;
159
+ drainGeneration++;
128
160
  }
@@ -0,0 +1,77 @@
1
+ /**
2
+ * Resource Readiness
3
+ *
4
+ * Utilities to defer speculative prefetches until critical resources
5
+ * (viewport images) have finished loading. Prevents prefetch fetch()
6
+ * calls from competing with images for the browser's connection pool.
7
+ */
8
+
9
+ /**
10
+ * Resolve when all in-viewport images have finished loading.
11
+ * Returns immediately if no images are pending.
12
+ *
13
+ * Only checks images that exist at call time — does not observe
14
+ * dynamically added images. For SPA navigations where new images
15
+ * appear after render, call this after the navigation settles.
16
+ */
17
+ export function waitForViewportImages(): Promise<void> {
18
+ if (typeof document === "undefined") return Promise.resolve();
19
+
20
+ const pending = Array.from(document.querySelectorAll("img")).filter((img) => {
21
+ if (img.complete) return false;
22
+ const rect = img.getBoundingClientRect();
23
+ return (
24
+ rect.bottom > 0 &&
25
+ rect.right > 0 &&
26
+ rect.top < window.innerHeight &&
27
+ rect.left < window.innerWidth
28
+ );
29
+ });
30
+
31
+ if (pending.length === 0) return Promise.resolve();
32
+
33
+ return new Promise((resolve) => {
34
+ const settled = new Set<HTMLImageElement>();
35
+
36
+ const settle = (img: HTMLImageElement) => {
37
+ if (settled.has(img)) return;
38
+ settled.add(img);
39
+ if (settled.size >= pending.length) resolve();
40
+ };
41
+
42
+ for (const img of pending) {
43
+ img.addEventListener("load", () => settle(img), { once: true });
44
+ img.addEventListener("error", () => settle(img), { once: true });
45
+ // Re-check: image may have completed between the initial filter
46
+ // and listener attachment. settle() is idempotent per image, so
47
+ // a queued load event firing afterward is harmless.
48
+ if (img.complete) settle(img);
49
+ }
50
+ });
51
+ }
52
+
53
+ /**
54
+ * Resolve after the given number of milliseconds.
55
+ */
56
+ export function wait(ms: number): Promise<void> {
57
+ return new Promise((resolve) => setTimeout(resolve, ms));
58
+ }
59
+
60
+ /**
61
+ * Resolve when the browser has an idle main-thread moment.
62
+ * Uses requestIdleCallback where available, falls back to setTimeout.
63
+ *
64
+ * This is a scheduling hint, not an asset-loaded detector — combine
65
+ * with waitForViewportImages() for full resource readiness.
66
+ */
67
+ export function waitForIdle(timeout = 200): Promise<void> {
68
+ if (typeof window !== "undefined" && "requestIdleCallback" in window) {
69
+ return new Promise((resolve) => {
70
+ window.requestIdleCallback(() => resolve(), { timeout });
71
+ });
72
+ }
73
+
74
+ return new Promise((resolve) => {
75
+ setTimeout(resolve, 0);
76
+ });
77
+ }
@@ -289,15 +289,17 @@ export function NavigationProvider({
289
289
  };
290
290
  }, [warmupEnabled]);
291
291
 
292
- // Cancel speculative prefetches when navigation starts.
293
- // Viewport/render prefetches should not compete with navigation fetches.
292
+ // Cancel non-matching prefetches when navigation starts.
293
+ // Frees connections so the navigation fetch isn't competing with
294
+ // speculative prefetches. The prefetch matching the navigation target
295
+ // is kept alive so it can be reused via consumeInflightPrefetch.
294
296
  useEffect(() => {
295
297
  let wasIdle = true;
296
298
  const unsub = eventController.subscribe(() => {
297
299
  const state = eventController.getState();
298
300
  const isIdle = state.state === "idle" && !state.isStreaming;
299
301
  if (wasIdle && !isIdle) {
300
- cancelAllPrefetches();
302
+ cancelAllPrefetches(state.pendingUrl);
301
303
  }
302
304
  wasIdle = isIdle;
303
305
  });