@timber-js/app 0.2.0-alpha.6 → 0.2.0-alpha.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. package/dist/_chunks/{tracing-Cwn7697K.js → tracing-CemImE6h.js} +16 -2
  2. package/dist/_chunks/{tracing-Cwn7697K.js.map → tracing-CemImE6h.js.map} +1 -1
  3. package/dist/adapters/nitro.d.ts.map +1 -1
  4. package/dist/adapters/nitro.js.map +1 -1
  5. package/dist/cache/fast-hash.d.ts +22 -0
  6. package/dist/cache/fast-hash.d.ts.map +1 -0
  7. package/dist/cache/index.js +51 -9
  8. package/dist/cache/index.js.map +1 -1
  9. package/dist/cache/register-cached-function.d.ts.map +1 -1
  10. package/dist/cache/timber-cache.d.ts.map +1 -1
  11. package/dist/client/index.js.map +1 -1
  12. package/dist/client/link.d.ts.map +1 -1
  13. package/dist/client/router.d.ts.map +1 -1
  14. package/dist/client/segment-context.d.ts +1 -1
  15. package/dist/client/segment-context.d.ts.map +1 -1
  16. package/dist/client/segment-merger.d.ts.map +1 -1
  17. package/dist/client/stale-reload.d.ts.map +1 -1
  18. package/dist/client/top-loader.d.ts.map +1 -1
  19. package/dist/client/transition-root.d.ts +1 -1
  20. package/dist/client/transition-root.d.ts.map +1 -1
  21. package/dist/index.d.ts +2 -0
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +12 -6
  24. package/dist/index.js.map +1 -1
  25. package/dist/plugins/entries.d.ts.map +1 -1
  26. package/dist/plugins/fonts.d.ts.map +1 -1
  27. package/dist/server/action-client.d.ts.map +1 -1
  28. package/dist/server/index.js +9 -1
  29. package/dist/server/index.js.map +1 -1
  30. package/dist/server/pipeline.d.ts.map +1 -1
  31. package/dist/server/response-cache.d.ts +5 -4
  32. package/dist/server/response-cache.d.ts.map +1 -1
  33. package/dist/server/route-element-builder.d.ts.map +1 -1
  34. package/dist/server/rsc-entry/index.d.ts.map +1 -1
  35. package/dist/server/rsc-entry/rsc-payload.d.ts.map +1 -1
  36. package/dist/server/rsc-entry/rsc-stream.d.ts +6 -0
  37. package/dist/server/rsc-entry/rsc-stream.d.ts.map +1 -1
  38. package/dist/server/rsc-entry/ssr-renderer.d.ts.map +1 -1
  39. package/dist/server/tracing.d.ts +10 -0
  40. package/dist/server/tracing.d.ts.map +1 -1
  41. package/dist/server/waituntil-bridge.d.ts.map +1 -1
  42. package/package.json +1 -1
  43. package/src/adapters/nitro.ts +6 -1
  44. package/src/cache/fast-hash.ts +34 -0
  45. package/src/cache/register-cached-function.ts +7 -3
  46. package/src/cache/timber-cache.ts +17 -10
  47. package/src/client/browser-entry.ts +10 -6
  48. package/src/client/link.tsx +14 -9
  49. package/src/client/router.ts +4 -6
  50. package/src/client/segment-context.ts +6 -1
  51. package/src/client/segment-merger.ts +2 -8
  52. package/src/client/stale-reload.ts +4 -6
  53. package/src/client/top-loader.tsx +8 -7
  54. package/src/client/transition-root.tsx +7 -1
  55. package/src/index.ts +2 -0
  56. package/src/plugins/entries.ts +1 -0
  57. package/src/plugins/fonts.ts +24 -17
  58. package/src/server/action-client.ts +7 -1
  59. package/src/server/pipeline.ts +7 -0
  60. package/src/server/response-cache.ts +169 -36
  61. package/src/server/route-element-builder.ts +1 -6
  62. package/src/server/rsc-entry/index.ts +9 -1
  63. package/src/server/rsc-entry/rsc-payload.ts +42 -10
  64. package/src/server/rsc-entry/rsc-stream.ts +9 -5
  65. package/src/server/rsc-entry/ssr-renderer.ts +11 -8
  66. package/src/server/tracing.ts +23 -0
  67. package/src/server/waituntil-bridge.ts +4 -1
@@ -487,7 +487,14 @@ export function createPipeline(config: PipelineConfig): (req: Request) => Promis
487
487
  return new Response(null, { status: error.status });
488
488
  }
489
489
  // RedirectSignal leaked from render — honour the redirect.
490
+ // For RSC payload requests, return 204 + X-Timber-Redirect so the
491
+ // client router can perform a soft SPA redirect (same as middleware path).
490
492
  if (error instanceof RedirectSignal) {
493
+ const isRsc = (req.headers.get('Accept') ?? '').includes('text/x-component');
494
+ if (isRsc) {
495
+ responseHeaders.set('X-Timber-Redirect', error.location);
496
+ return new Response(null, { status: 204, headers: responseHeaders });
497
+ }
491
498
  responseHeaders.set('Location', error.location);
492
499
  return new Response(null, { status: error.status, headers: responseHeaders });
493
500
  }
@@ -10,10 +10,11 @@
10
10
  * re-executing the RSC-to-SSR pipeline. Entries have a short TTL
11
11
  * (default 5s) and the cache has a bounded size (default 150 entries).
12
12
  *
13
- * Cache keys are compound: method + pathname + isRscPayload. Responses
14
- * with Set-Cookie headers are never cached (they contain user-specific
15
- * state). When `publicOnly` is true (default), requests with Cookie or
16
- * Authorization headers bypass the cache entirely.
13
+ * Cache keys are compound: pathname + search + isRscPayload + Vary'd headers.
14
+ * Only GET requests are cached. Responses with Set-Cookie, Cache-Control:
15
+ * no-store/private, or error/redirect status codes are never cached.
16
+ * When `publicOnly` is true (default), requests with Cookie or Authorization
17
+ * headers bypass the cache entirely.
17
18
  *
18
19
  * See design/02-rendering-pipeline.md, design/31-benchmarking.md.
19
20
  */
@@ -65,16 +66,11 @@ interface CacheEntry {
65
66
  headers: [string, string][];
66
67
  /** Timestamp when this entry was created. */
67
68
  createdAt: number;
68
- }
69
-
70
- // ─── Singleflight Result ───────────────────────────────────────────────────
71
-
72
- /** Internal type: singleflight returns either a raw response or a cache entry. */
73
- interface SingleflightResult {
74
- /** Non-null when the response wasn't cacheable — only the first caller gets it. */
75
- response: Response | null;
76
- /** Non-null when the response was cached — all callers construct from this. */
77
- entry: CacheEntry | null;
69
+ /**
70
+ * The Vary header value from the original response, if any.
71
+ * Used to build variant-aware cache keys for subsequent requests.
72
+ */
73
+ vary: string | null;
78
74
  }
79
75
 
80
76
  // ─── LRU Cache ─────────────────────────────────────────────────────────────
@@ -154,6 +150,59 @@ export interface ResponseCache {
154
150
  clear(): void;
155
151
  }
156
152
 
153
+ // ─── Cache-Control parsing ─────────────────────────────────────────────────
154
+
155
+ /**
156
+ * Check if a Cache-Control header value contains directives that forbid
157
+ * storing the response in a shared cache. We check for `no-store` and
158
+ * `private` — both indicate the response must not be reused.
159
+ *
160
+ * This is intentionally simple: we don't parse `max-age`, `s-maxage`,
161
+ * `must-revalidate`, etc. This is a short-TTL render cache, not an HTTP
162
+ * cache — we just need to respect explicit "don't cache this" signals.
163
+ */
164
+ function hasCacheControlNoStore(headerValue: string | null): boolean {
165
+ if (!headerValue) return false;
166
+ // Split on comma, trim whitespace, check for no-store or private directives.
167
+ // Case-insensitive per HTTP spec.
168
+ const lower = headerValue.toLowerCase();
169
+ return lower.includes('no-store') || lower.includes('private');
170
+ }
171
+
172
+ // ─── Vary header handling ──────────────────────────────────────────────────
173
+
174
+ /**
175
+ * Parse a Vary header value into a sorted list of header names.
176
+ * Returns null if there is no Vary header or it's empty.
177
+ * Returns ['*'] if Vary: * (meaning the response varies on everything —
178
+ * effectively uncacheable).
179
+ */
180
+ function parseVaryHeader(headerValue: string | null): string[] | null {
181
+ if (!headerValue) return null;
182
+ const trimmed = headerValue.trim();
183
+ if (trimmed === '') return null;
184
+ if (trimmed === '*') return ['*'];
185
+
186
+ // Split on comma, normalize to lowercase, sort for deterministic keys
187
+ return trimmed
188
+ .split(',')
189
+ .map((h) => h.trim().toLowerCase())
190
+ .filter((h) => h.length > 0)
191
+ .sort();
192
+ }
193
+
194
+ /**
195
+ * Build a Vary-aware suffix for the cache key. For each header name in the
196
+ * Vary list, append the request's value for that header. This ensures that
197
+ * requests with different Accept-Language (for example) get different cache
198
+ * entries.
199
+ */
200
+ function buildVarySuffix(req: Request, varyHeaders: string[]): string {
201
+ return varyHeaders.map((h) => `${h}=${req.headers.get(h) ?? ''}`).join('&');
202
+ }
203
+
204
+ // ─── Factory ───────────────────────────────────────────────────────────────
205
+
157
206
  /**
158
207
  * Create a response cache with singleflight deduplication and LRU caching.
159
208
  */
@@ -161,7 +210,22 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
161
210
  const lru = new LruCache(config.maxSize, config.ttlMs);
162
211
  const singleflight = createSingleflight();
163
212
 
213
+ /**
214
+ * Known Vary headers per path. When a response includes a Vary header,
215
+ * we store the parsed header names so that subsequent requests to the
216
+ * same path can include the Vary'd header values in their cache key
217
+ * BEFORE rendering (i.e., on cache lookup, not just after the first
218
+ * render). This avoids the "first request always misses" problem for
219
+ * Vary'd responses.
220
+ */
221
+ const knownVaryHeaders = new Map<string, string[]>();
222
+
164
223
  function buildCacheKey(req: Request, isRscPayload: boolean): string | null {
224
+ // Never cache non-GET requests. POST/PUT/DELETE have side effects and
225
+ // may carry per-request state (e.g., form flash data via ALS) that makes
226
+ // the rendered output unique even for the same URL.
227
+ if (req.method !== 'GET') return null;
228
+
165
229
  // When publicOnly is true, skip caching for authenticated requests
166
230
  if (config.publicOnly) {
167
231
  if (req.headers.has('Cookie') || req.headers.has('Authorization')) {
@@ -170,13 +234,30 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
170
234
  }
171
235
 
172
236
  const url = new URL(req.url);
173
- return `${req.method}:${url.pathname}:${isRscPayload ? 'rsc' : 'html'}`;
237
+ // Include search params in the cache key. Pages that use searchParams
238
+ // (e.g., ?sort=asc, ?page=2) produce different output per query string.
239
+ let key = `${url.pathname}${url.search}:${isRscPayload ? 'rsc' : 'html'}`;
240
+
241
+ // If we've seen a Vary header for this path before, include the varied
242
+ // request header values in the key so different variants get different
243
+ // cache entries.
244
+ const pathKey = `${url.pathname}:${isRscPayload ? 'rsc' : 'html'}`;
245
+ const varyHeaders = knownVaryHeaders.get(pathKey);
246
+ if (varyHeaders) {
247
+ if (varyHeaders[0] === '*') {
248
+ // Vary: * means the response varies on everything — uncacheable
249
+ return null;
250
+ }
251
+ key += ':' + buildVarySuffix(req, varyHeaders);
252
+ }
253
+
254
+ return key;
174
255
  }
175
256
 
176
257
  /**
177
258
  * Check if a response is cacheable.
178
- * Responses with Set-Cookie headers are never cached — they contain
179
- * user-specific state that must not be shared across requests.
259
+ * Responses with Set-Cookie headers, Cache-Control: no-store/private,
260
+ * error/redirect status codes, or Vary: * are never cached.
180
261
  */
181
262
  function isCacheable(response: Response): boolean {
182
263
  // Don't cache error responses
@@ -188,6 +269,14 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
188
269
  // Don't cache responses with Set-Cookie (user-specific state)
189
270
  if (response.headers.has('Set-Cookie')) return false;
190
271
 
272
+ // Respect Cache-Control: no-store and private directives.
273
+ // If the application explicitly says "don't cache this," we obey.
274
+ if (hasCacheControlNoStore(response.headers.get('Cache-Control'))) return false;
275
+
276
+ // Vary: * means the response varies on everything — don't cache
277
+ const vary = parseVaryHeader(response.headers.get('Vary'));
278
+ if (vary && vary[0] === '*') return false;
279
+
191
280
  // Only cache responses with a body
192
281
  if (!response.body) return false;
193
282
 
@@ -196,13 +285,27 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
196
285
 
197
286
  /** Construct a fresh Response from a cache entry (each caller gets their own). */
198
287
  function responseFromEntry(entry: CacheEntry): Response {
288
+ // Null-body statuses (204, 304) cannot have a body per HTTP spec.
289
+ // The Response constructor throws if you pass a body with these statuses.
290
+ const isNullBody = entry.status === 204 || entry.status === 304;
199
291
  // slice(0) creates a copy so each caller owns their buffer
200
- return new Response(entry.body.slice(0), {
292
+ return new Response(isNullBody ? null : entry.body.slice(0), {
201
293
  status: entry.status,
202
294
  headers: entry.headers,
203
295
  });
204
296
  }
205
297
 
298
+ /**
299
+ * Record the Vary header from a response so future requests to the same
300
+ * path include varied header values in their cache key.
301
+ */
302
+ function recordVaryHeaders(pathKey: string, response: Response): void {
303
+ const vary = parseVaryHeader(response.headers.get('Vary'));
304
+ if (vary) {
305
+ knownVaryHeaders.set(pathKey, vary);
306
+ }
307
+ }
308
+
206
309
  return {
207
310
  async getOrRender(
208
311
  req: Request,
@@ -211,7 +314,9 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
211
314
  ): Promise<Response> {
212
315
  const cacheKey = buildCacheKey(req, isRscPayload);
213
316
 
214
- // No cache key = skip caching entirely
317
+ // No cache key = skip caching and singleflight entirely.
318
+ // This covers POST requests, authenticated requests (publicOnly),
319
+ // and Vary: * responses.
215
320
  if (cacheKey === null) {
216
321
  return renderFn();
217
322
  }
@@ -223,18 +328,41 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
223
328
  }
224
329
 
225
330
  // Singleflight: concurrent requests to the same key share one render.
226
- // The singleflight returns a SingleflightResult so all waiters
227
- // can construct their own Response from the same cached data.
228
- const result: SingleflightResult = await singleflight.do(cacheKey, async () => {
331
+ // We buffer the response body into an ArrayBuffer so ALL callers —
332
+ // including the singleflight leader — get independent copies.
333
+ // This fixes the body-loss bug where the leader consumed the body
334
+ // and concurrent waiters got an empty response.
335
+ const result: CacheEntry | null = await singleflight.do(cacheKey, async () => {
229
336
  const response = await renderFn();
230
337
 
338
+ // Record Vary headers for future cache key construction
339
+ const url = new URL(req.url);
340
+ const pathKey = `${url.pathname}:${isRscPayload ? 'rsc' : 'html'}`;
341
+ recordVaryHeaders(pathKey, response);
342
+
231
343
  if (!isCacheable(response)) {
232
- return { response, entry: null };
344
+ // Buffer the body even for non-cacheable responses so the
345
+ // singleflight leader and all concurrent waiters each get
346
+ // an independent copy. Without this, the leader consumes
347
+ // the body stream and waiters get an empty response.
348
+ const body = await response.arrayBuffer();
349
+ const headers: [string, string][] = [];
350
+ response.headers.forEach((value, key) => {
351
+ headers.push([key, value]);
352
+ });
353
+ // Return as a CacheEntry shape but DON'T store in LRU.
354
+ // Callers construct Responses from this, but it won't be
355
+ // reused for future requests.
356
+ return {
357
+ body,
358
+ status: response.status,
359
+ headers,
360
+ createdAt: Date.now(),
361
+ vary: response.headers.get('Vary'),
362
+ };
233
363
  }
234
364
 
235
365
  // Buffer the response body for caching.
236
- // The original Response body is consumed here — callers get copies
237
- // from the cached ArrayBuffer.
238
366
  const body = await response.arrayBuffer();
239
367
  const headers: [string, string][] = [];
240
368
  response.headers.forEach((value, key) => {
@@ -246,24 +374,28 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
246
374
  status: response.status,
247
375
  headers,
248
376
  createdAt: Date.now(),
377
+ vary: response.headers.get('Vary'),
249
378
  };
250
379
 
251
- lru.set(cacheKey, entry);
380
+ // Re-check the cache key now that we know the Vary headers.
381
+ // The initial key may not have included Vary'd header values
382
+ // if this was the first request to this path. Rebuild the key
383
+ // with the now-known Vary headers for correct LRU storage.
384
+ const updatedKey = buildCacheKey(req, isRscPayload);
385
+ if (updatedKey) {
386
+ lru.set(updatedKey, entry);
387
+ }
252
388
 
253
- return { response: null, entry };
389
+ return entry;
254
390
  });
255
391
 
256
- // Non-cacheable response — only the first caller gets the original.
257
- // For singleflight, this means concurrent waiters get the same promise
258
- // result. The first caller already consumed the body, so subsequent
259
- // callers would get an empty body. This is acceptable: non-cacheable
260
- // responses (errors, redirects, Set-Cookie) are rare under concurrent
261
- // identical requests, and the status + headers are still correct.
262
- if (result.response) {
263
- return result.response;
392
+ if (result === null) {
393
+ // Shouldn't happen — singleflight always returns a result.
394
+ // Defensive fallback: re-render.
395
+ return renderFn();
264
396
  }
265
397
 
266
- return responseFromEntry(result.entry!);
398
+ return responseFromEntry(result);
267
399
  },
268
400
 
269
401
  get size() {
@@ -272,6 +404,7 @@ export function createResponseCache(config: ResolvedResponseCacheConfig): Respon
272
404
 
273
405
  clear() {
274
406
  lru.clear();
407
+ knownVaryHeaders.clear();
275
408
  },
276
409
  };
277
410
  }
@@ -352,12 +352,7 @@ export async function buildRouteElement(
352
352
  // same urlPath (e.g., /(marketing) and /(app) both have "/"),
353
353
  // which would cause the wrong cached layout to be reused
354
354
  const skip =
355
- shouldSkipSegment(
356
- segment.urlPath,
357
- layoutComponent,
358
- isLeaf,
359
- clientStateTree ?? null
360
- ) &&
355
+ shouldSkipSegment(segment.urlPath, layoutComponent, isLeaf, clientStateTree ?? null) &&
361
356
  hasRenderedLayoutBelow &&
362
357
  segment.segmentType !== 'group';
363
358
 
@@ -368,7 +368,8 @@ async function renderRoute(
368
368
  throw error;
369
369
  }
370
370
 
371
- const { element, headElements, layoutComponents, deferSuspenseFor, skippedSegments } = routeResult;
371
+ const { element, headElements, layoutComponents, deferSuspenseFor, skippedSegments } =
372
+ routeResult;
372
373
 
373
374
  // Build head HTML for injection into the SSR output.
374
375
  // Collects CSS, fonts, and modulepreload from the build manifest for matched segments.
@@ -385,6 +386,13 @@ async function renderRoute(
385
386
  headHtml += buildCssLinkTags(cssUrls);
386
387
  }
387
388
 
389
+ // Inject font CSS stylesheet — pure CSS, no JS needed.
390
+ // The URL is set by the timber-fonts plugin when fonts are registered.
391
+ const fontCssUrl = (config as { fontCssUrl?: string | null }).fontCssUrl;
392
+ if (fontCssUrl) {
393
+ headHtml += `<link rel="stylesheet" href="${fontCssUrl}">`;
394
+ }
395
+
388
396
  const fontEntries = collectRouteFonts(segments, typedManifest);
389
397
  if (fontEntries.length > 0) {
390
398
  headHtml += buildFontPreloadTags(fontEntries);
@@ -45,18 +45,45 @@ export async function buildRscPayloadResponse(
45
45
  skippedSegments?: string[]
46
46
  ): Promise<Response> {
47
47
  // Read the first chunk from the RSC stream before committing headers.
48
+ // Race the first read against signal detection — if an async component
49
+ // throws a RedirectSignal or DenySignal, the onError callback fires
50
+ // signals.onSignal() and we can react immediately without waiting for
51
+ // the full macrotask queue.
52
+ //
53
+ // The rejection chain for an async-wrapped page component:
54
+ // 1. PageComponent throws RedirectSignal
55
+ // 2. withSpan catches and re-throws (microtask 1)
56
+ // 3. TracedPage promise rejects (microtask 2)
57
+ // 4. React Flight rejection handler → onError (microtask 3+)
58
+ //
59
+ // Promise.race reacts the instant onError fires, eliminating the
60
+ // per-request setTimeout(0) macrotask delay for the common case
61
+ // (no signal). A 50ms ceiling timeout guards against edge cases
62
+ // where onError never fires.
48
63
  const reader = rscStream.getReader();
49
- const firstRead = await reader.read();
64
+ const signalDetected = new Promise<void>((resolve) => {
65
+ signals.onSignal = resolve;
66
+ });
50
67
 
51
- // Yield to the microtask queue so that async component rejections
52
- // (e.g. an async-wrapped page component that throws redirect())
53
- // propagate to the onError callback before we check the signals.
54
- // The rejected Promise from an async component resolves in the next
55
- // microtask after read(), so we need at least one tick.
56
- //
57
- // Uses queueMicrotask instead of setTimeout(0) to stay within the
58
- // same tick — no full event loop round-trip needed.
59
- await new Promise<void>((r) => queueMicrotask(r));
68
+ type RaceResult =
69
+ | { type: 'data'; chunk: ReadableStreamReadResult<Uint8Array> }
70
+ | { type: 'signal' };
71
+
72
+ const first: RaceResult = await Promise.race([
73
+ reader.read().then((chunk) => ({ type: 'data' as const, chunk })),
74
+ signalDetected.then(() => ({ type: 'signal' as const })),
75
+ ]);
76
+
77
+ // If data arrived first, still check signals — they may have fired
78
+ // concurrently. Also do a final ceiling timeout check for edge cases
79
+ // where the signal fires just after the first read resolves.
80
+ if (first.type === 'data' && !signals.redirectSignal && !signals.denySignal) {
81
+ // Brief yield to let any in-flight microtask rejections complete.
82
+ await new Promise<void>((r) => setTimeout(r, 0));
83
+ }
84
+
85
+ // Detach the callback — no longer needed after this point.
86
+ signals.onSignal = undefined;
60
87
 
61
88
  // Check for redirect/deny signals detected during initial rendering
62
89
  const trackedRedirect = signals.redirectSignal as RedirectSignal | null;
@@ -75,6 +102,11 @@ export async function buildRscPayloadResponse(
75
102
  );
76
103
  }
77
104
 
105
+ // Extract the first chunk from the race result.
106
+ // If the signal won the race, read the first chunk now (the stream
107
+ // was already cancelled above, but we need a firstRead shape below).
108
+ const firstRead = first.type === 'data' ? first.chunk : await reader.read();
109
+
78
110
  // Reconstruct the stream: prepend the buffered first chunk,
79
111
  // then continue piping from the original reader.
80
112
  const patchedStream = new ReadableStream<Uint8Array>({
@@ -24,11 +24,17 @@ import { isDebug } from '#/server/debug.js';
24
24
  *
25
25
  * Signals fire asynchronously via `onError` during stream consumption.
26
26
  * The first signal of each type wins — subsequent signals are ignored.
27
+ *
28
+ * `onSignal` is an optional callback fired when a DenySignal or
29
+ * RedirectSignal is captured. Consumers use it with Promise.race to
30
+ * react immediately instead of polling with setTimeout/queueMicrotask.
27
31
  */
28
32
  export interface RenderSignals {
29
33
  denySignal: DenySignal | null;
30
34
  redirectSignal: RedirectSignal | null;
31
35
  renderError: { error: unknown; status: number } | null;
36
+ /** Callback fired when a redirect or deny signal is captured in onError. */
37
+ onSignal?: () => void;
32
38
  }
33
39
 
34
40
  export interface RscStreamResult {
@@ -67,11 +73,13 @@ export function renderRscStream(element: React.ReactElement, req: Request): RscS
67
73
  if (isAbortError(error) || req.signal?.aborted) return;
68
74
  if (error instanceof DenySignal) {
69
75
  signals.denySignal = error;
76
+ signals.onSignal?.();
70
77
  // Return structured digest for client-side error boundaries
71
78
  return JSON.stringify({ type: 'deny', status: error.status, data: error.data });
72
79
  }
73
80
  if (error instanceof RedirectSignal) {
74
81
  signals.redirectSignal = error;
82
+ signals.onSignal?.();
75
83
  return JSON.stringify({
76
84
  type: 'redirect',
77
85
  location: error.location,
@@ -98,11 +106,7 @@ export function renderRscStream(element: React.ReactElement, req: Request): RscS
98
106
  // directive isn't at the very top of the file, or the component is
99
107
  // re-exported through a barrel file without 'use client'.
100
108
  // See LOCAL-297.
101
- if (
102
- isDebug() &&
103
- error instanceof Error &&
104
- error.message.includes('Invalid hook call')
105
- ) {
109
+ if (isDebug() && error instanceof Error && error.message.includes('Invalid hook call')) {
106
110
  console.error(
107
111
  '[timber] A React hook was called during RSC rendering. This usually means a ' +
108
112
  "'use client' component is being executed as a server component instead of " +
@@ -156,16 +156,19 @@ export async function renderSsrResponse(opts: SsrRenderOptions): Promise<Respons
156
156
  try {
157
157
  const ssrResponse = await callSsr(ssrStream, navContext);
158
158
 
159
- // Signal promotion: yield one microtask so async component rejections
160
- // propagate to the RSC onError callback, then check if any signals
161
- // were captured during rendering inside Suspense boundaries.
162
- // The Response hasn't been sent yet — it's an unconsumed stream.
159
+ // Signal promotion: check if any signals were captured during rendering
160
+ // inside Suspense boundaries. If no signals are present yet, yield one
161
+ // microtask so async component rejections propagate to the RSC onError
162
+ // callback before we commit the response.
163
163
  //
164
- // Uses queueMicrotask instead of setTimeout(0) to avoid yielding to
165
- // the full event loop (timers phase). Microtask resolution happens
166
- // within the same tick, eliminating per-request idle time under load.
164
+ // When signals are already captured (onSignal already fired), skip the
165
+ // yield entirely react immediately. Uses queueMicrotask instead of
166
+ // setTimeout(0) for the fallback to avoid yielding to the full event
167
+ // loop (timers phase).
167
168
  // See design/05-streaming.md §"deferSuspenseFor and the Hold Window"
168
- await new Promise<void>((r) => queueMicrotask(r));
169
+ if (!signals.redirectSignal && !signals.denySignal && !signals.renderError) {
170
+ await new Promise<void>((r) => queueMicrotask(r));
171
+ }
169
172
 
170
173
  const promoted = checkCapturedSignals(/* skipHandledDeny */ true);
171
174
  if (promoted) {
@@ -252,6 +252,29 @@ export async function addSpanEvent(
252
252
  }
253
253
  }
254
254
 
255
+ /**
256
+ * Fire-and-forget span event — no await, no microtask overhead.
257
+ *
258
+ * Used on the cache hot path where awaiting addSpanEvent creates an
259
+ * unnecessary microtask per cache operation. If OTEL is not loaded yet,
260
+ * the event is silently dropped (acceptable for diagnostics).
261
+ *
262
+ * See TIM-370 for perf motivation.
263
+ */
264
+ export function addSpanEventSync(
265
+ name: string,
266
+ attributes?: Record<string, string | number | boolean>
267
+ ): void {
268
+ // Fast path: if OTEL API hasn't been loaded yet, skip entirely.
269
+ // _otelApi is undefined (not yet loaded), null (failed to load), or the module.
270
+ if (!_otelApi) return;
271
+
272
+ const activeSpan = _otelApi.trace.getActiveSpan();
273
+ if (activeSpan) {
274
+ activeSpan.addEvent(name, attributes);
275
+ }
276
+ }
277
+
255
278
  /**
256
279
  * Try to extract the OTEL trace ID from the current active span context.
257
280
  * Returns undefined if OTEL is not active or no span exists.
@@ -18,7 +18,10 @@ import { waitUntilAls } from './als-registry.js';
18
18
  * Called by generated entry points (Nitro node-server/bun, Cloudflare)
19
19
  * to bind the platform's lifecycle extension for the request duration.
20
20
  */
21
- export function runWithWaitUntil<T>(waitUntilFn: (promise: Promise<unknown>) => void, fn: () => T): T {
21
+ export function runWithWaitUntil<T>(
22
+ waitUntilFn: (promise: Promise<unknown>) => void,
23
+ fn: () => T
24
+ ): T {
22
25
  return waitUntilAls.run(waitUntilFn, fn);
23
26
  }
24
27