veryfront 0.0.80 → 0.0.82

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/esm/deno.js +1 -1
  2. package/esm/src/cache/backend.d.ts +20 -0
  3. package/esm/src/cache/backend.d.ts.map +1 -1
  4. package/esm/src/cache/backend.js +57 -0
  5. package/esm/src/cache/hash.d.ts +107 -0
  6. package/esm/src/cache/hash.d.ts.map +1 -0
  7. package/esm/src/cache/hash.js +166 -0
  8. package/esm/src/cache/index.d.ts +3 -0
  9. package/esm/src/cache/index.d.ts.map +1 -1
  10. package/esm/src/cache/index.js +3 -0
  11. package/esm/src/cache/module-cache.d.ts +82 -0
  12. package/esm/src/cache/module-cache.d.ts.map +1 -0
  13. package/esm/src/cache/module-cache.js +214 -0
  14. package/esm/src/cache/multi-tier.d.ts +177 -0
  15. package/esm/src/cache/multi-tier.d.ts.map +1 -0
  16. package/esm/src/cache/multi-tier.js +352 -0
  17. package/esm/src/cli/templates/integration-loader.d.ts.map +1 -1
  18. package/esm/src/cli/templates/integration-loader.js +2 -4
  19. package/esm/src/modules/react-loader/ssr-module-loader/loader.d.ts.map +1 -1
  20. package/esm/src/modules/react-loader/ssr-module-loader/loader.js +121 -14
  21. package/esm/src/observability/tracing/span-names.d.ts +2 -0
  22. package/esm/src/observability/tracing/span-names.d.ts.map +1 -1
  23. package/esm/src/observability/tracing/span-names.js +2 -0
  24. package/esm/src/rendering/orchestrator/module-loader/cache.d.ts +10 -2
  25. package/esm/src/rendering/orchestrator/module-loader/cache.d.ts.map +1 -1
  26. package/esm/src/rendering/orchestrator/module-loader/cache.js +11 -6
  27. package/esm/src/rendering/orchestrator/module-loader/index.d.ts.map +1 -1
  28. package/esm/src/rendering/orchestrator/module-loader/index.js +72 -77
  29. package/esm/src/transforms/esm/http-cache.d.ts.map +1 -1
  30. package/esm/src/transforms/esm/http-cache.js +6 -29
  31. package/esm/src/transforms/esm/transform-cache.d.ts +25 -0
  32. package/esm/src/transforms/esm/transform-cache.d.ts.map +1 -1
  33. package/esm/src/transforms/esm/transform-cache.js +45 -0
  34. package/esm/src/transforms/mdx/esm-module-loader/module-fetcher/index.d.ts.map +1 -1
  35. package/esm/src/transforms/mdx/esm-module-loader/module-fetcher/index.js +2 -36
  36. package/esm/src/utils/constants/cache.d.ts +4 -0
  37. package/esm/src/utils/constants/cache.d.ts.map +1 -1
  38. package/esm/src/utils/constants/cache.js +14 -1
  39. package/package.json +1 -1
  40. package/src/deno.js +1 -1
  41. package/src/src/cache/backend.ts +62 -0
  42. package/src/src/cache/hash.ts +205 -0
  43. package/src/src/cache/index.ts +3 -0
  44. package/src/src/cache/module-cache.ts +252 -0
  45. package/src/src/cache/multi-tier.ts +503 -0
  46. package/src/src/cli/templates/integration-loader.ts +2 -8
  47. package/src/src/modules/react-loader/ssr-module-loader/loader.ts +137 -18
  48. package/src/src/observability/tracing/span-names.ts +2 -0
  49. package/src/src/rendering/orchestrator/module-loader/cache.ts +14 -8
  50. package/src/src/rendering/orchestrator/module-loader/index.ts +94 -89
  51. package/src/src/transforms/esm/http-cache.ts +12 -32
  52. package/src/src/transforms/esm/transform-cache.ts +53 -0
  53. package/src/src/transforms/mdx/esm-module-loader/module-fetcher/index.ts +2 -40
  54. package/src/src/utils/constants/cache.ts +21 -1
@@ -0,0 +1,503 @@
1
+ /**
2
+ * Multi-Tier Cache Abstraction
3
+ *
4
+ * Generic implementation for L1 → L2 → L3 cache flows with automatic backfill.
5
+ * This provides consistent caching behavior across the codebase:
6
+ *
7
+ * - L1: In-memory (fastest, per-pod, lost on restart)
8
+ * - L2: Local disk (fast, per-pod, survives restart)
9
+ * - L3: Distributed (Redis/API, cross-pod, shared state)
10
+ *
11
+ * When a cache hit occurs at a lower tier (e.g., L3), the value is automatically
12
+ * backfilled to higher tiers (L1, L2) for faster subsequent access.
13
+ *
14
+ * @module cache/multi-tier
15
+ */
16
+
17
+ import { rendererLogger as logger } from "../utils/index.js";
18
+ import { withSpan } from "../observability/tracing/otlp-setup.js";
19
+ import { SpanNames } from "../observability/tracing/span-names.js";
20
+ import type { Span } from "@opentelemetry/api";
21
+
22
+ /**
23
+ * Generic cache tier interface.
24
+ * Each tier implements async get/set operations.
25
+ */
26
+ export interface CacheTier<T = string> {
27
+ /** Tier name for logging/debugging */
28
+ readonly name: string;
29
+
30
+ /** Get a value from this tier */
31
+ get(key: string): Promise<T | null>;
32
+
33
+ /** Set a value in this tier */
34
+ set(key: string, value: T, ttlSeconds?: number): Promise<void>;
35
+
36
+ /** Delete a value from this tier */
37
+ delete?(key: string): Promise<void>;
38
+
39
+ /** Check if key exists (optional, uses get if not implemented) */
40
+ has?(key: string): Promise<boolean>;
41
+
42
+ /** Get multiple values (optional batch operation) */
43
+ getBatch?(keys: string[]): Promise<Map<string, T | null>>;
44
+
45
+ /** Set multiple values (optional batch operation) */
46
+ setBatch?(entries: Array<{ key: string; value: T; ttl?: number }>): Promise<void>;
47
+ }
48
+
49
+ /**
50
+ * Configuration for multi-tier cache.
51
+ */
52
+ export interface MultiTierCacheConfig<T = string> {
53
+ /** Cache name for logging */
54
+ name: string;
55
+
56
+ /** L1: Memory tier (optional) */
57
+ l1?: CacheTier<T>;
58
+
59
+ /** L2: Disk tier (optional) */
60
+ l2?: CacheTier<T>;
61
+
62
+ /** L3: Distributed tier (optional) */
63
+ l3?: CacheTier<T>;
64
+
65
+ /** Default TTL in seconds for set operations */
66
+ defaultTtlSeconds?: number;
67
+
68
+ /** Whether to backfill higher tiers on lower-tier hits (default: true) */
69
+ backfillOnHit?: boolean;
70
+
71
+ /** Whether to use fire-and-forget for backfill operations (default: true) */
72
+ asyncBackfill?: boolean;
73
+ }
74
+
75
+ /**
76
+ * Cache hit statistics.
77
+ */
78
+ export interface CacheStats {
79
+ /** Total get operations */
80
+ gets: number;
81
+
82
+ /** Hits at each tier */
83
+ l1Hits: number;
84
+ l2Hits: number;
85
+ l3Hits: number;
86
+
87
+ /** Total misses (no tier had the value) */
88
+ misses: number;
89
+
90
+ /** Set operations */
91
+ sets: number;
92
+
93
+ /** Backfill operations triggered */
94
+ backfills: number;
95
+ }
96
+
97
(see above)
463
+
464
+ /**
465
+ * Create a memory-backed cache tier from a CacheBackend.
466
+ */
467
+ export function createMemoryTier(backend: {
468
+ get(key: string): Promise<string | null>;
469
+ set(key: string, value: string, ttlSeconds?: number): Promise<void>;
470
+ del?(key: string): Promise<void>;
471
+ getBatch?(keys: string[]): Promise<Map<string, string | null>>;
472
+ setBatch?(entries: Array<{ key: string; value: string; ttl?: number }>): Promise<void>;
473
+ }): CacheTier<string> {
474
+ return {
475
+ name: "memory",
476
+ get: (key) => backend.get(key),
477
+ set: (key, value, ttl) => backend.set(key, value, ttl),
478
+ delete: backend.del?.bind(backend),
479
+ getBatch: backend.getBatch?.bind(backend),
480
+ setBatch: backend.setBatch?.bind(backend),
481
+ };
482
+ }
483
+
484
+ /**
485
+ * Create a distributed cache tier from a CacheBackend.
486
+ */
487
+ export function createDistributedTier(backend: {
488
+ readonly type: string;
489
+ get(key: string): Promise<string | null>;
490
+ set(key: string, value: string, ttlSeconds?: number): Promise<void>;
491
+ del?(key: string): Promise<void>;
492
+ getBatch?(keys: string[]): Promise<Map<string, string | null>>;
493
+ setBatch?(entries: Array<{ key: string; value: string; ttl?: number }>): Promise<void>;
494
+ }): CacheTier<string> {
495
+ return {
496
+ name: `distributed-${backend.type}`,
497
+ get: (key) => backend.get(key),
498
+ set: (key, value, ttl) => backend.set(key, value, ttl),
499
+ delete: backend.del?.bind(backend),
500
+ getBatch: backend.getBatch?.bind(backend),
501
+ setBatch: backend.setBatch?.bind(backend),
502
+ };
503
+ }
@@ -195,14 +195,9 @@ export async function loadIntegration(
195
195
  const config = await loadIntegrationConfig(integrationName);
196
196
  if (!config) return null;
197
197
 
198
- const filesDir = pathHelper.join(
199
- getIntegrationDirectory(integrationName),
200
- "files",
201
- );
202
-
203
198
  return {
204
199
  config,
205
- files: await loadTemplateFromDirectory(filesDir),
200
+ files: await loadTemplateFromDirectory(`integration:${integrationName}`),
206
201
  };
207
202
  }
208
203
 
@@ -317,8 +312,7 @@ export async function getAvailablePrompts(
317
312
  * These include setup guide page and status API
318
313
  */
319
314
  export function loadIntegrationBaseFilesFromDirectory(): Promise<TemplateFile[]> {
320
- const filesDir = pathHelper.join(getIntegrationDirectory("_base"), "files");
321
- return loadTemplateFromDirectory(filesDir);
315
+ return loadTemplateFromDirectory("integration:_base");
322
316
  }
323
317
 
324
318
  /**