awaitly 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. package/LICENSE +21 -0
  2. package/README.md +1278 -0
  3. package/dist/batch.cjs +2 -0
  4. package/dist/batch.cjs.map +1 -0
  5. package/dist/batch.d.cts +197 -0
  6. package/dist/batch.d.ts +197 -0
  7. package/dist/batch.js +2 -0
  8. package/dist/batch.js.map +1 -0
  9. package/dist/circuit-breaker.cjs +2 -0
  10. package/dist/circuit-breaker.cjs.map +1 -0
  11. package/dist/circuit-breaker.d.cts +208 -0
  12. package/dist/circuit-breaker.d.ts +208 -0
  13. package/dist/circuit-breaker.js +2 -0
  14. package/dist/circuit-breaker.js.map +1 -0
  15. package/dist/conditional.cjs +2 -0
  16. package/dist/conditional.cjs.map +1 -0
  17. package/dist/conditional.d.cts +249 -0
  18. package/dist/conditional.d.ts +249 -0
  19. package/dist/conditional.js +2 -0
  20. package/dist/conditional.js.map +1 -0
  21. package/dist/core-BuTBsR0x.d.cts +2325 -0
  22. package/dist/core-BuTBsR0x.d.ts +2325 -0
  23. package/dist/core.cjs +2 -0
  24. package/dist/core.cjs.map +1 -0
  25. package/dist/core.d.cts +3 -0
  26. package/dist/core.d.ts +3 -0
  27. package/dist/core.js +2 -0
  28. package/dist/core.js.map +1 -0
  29. package/dist/devtools.cjs +11 -0
  30. package/dist/devtools.cjs.map +1 -0
  31. package/dist/devtools.d.cts +176 -0
  32. package/dist/devtools.d.ts +176 -0
  33. package/dist/devtools.js +11 -0
  34. package/dist/devtools.js.map +1 -0
  35. package/dist/duration.cjs +2 -0
  36. package/dist/duration.cjs.map +1 -0
  37. package/dist/duration.d.cts +246 -0
  38. package/dist/duration.d.ts +246 -0
  39. package/dist/duration.js +2 -0
  40. package/dist/duration.js.map +1 -0
  41. package/dist/hitl.cjs +2 -0
  42. package/dist/hitl.cjs.map +1 -0
  43. package/dist/hitl.d.cts +337 -0
  44. package/dist/hitl.d.ts +337 -0
  45. package/dist/hitl.js +2 -0
  46. package/dist/hitl.js.map +1 -0
  47. package/dist/index.cjs +2 -0
  48. package/dist/index.cjs.map +1 -0
  49. package/dist/index.d.cts +4 -0
  50. package/dist/index.d.ts +4 -0
  51. package/dist/index.js +2 -0
  52. package/dist/index.js.map +1 -0
  53. package/dist/match.cjs +2 -0
  54. package/dist/match.cjs.map +1 -0
  55. package/dist/match.d.cts +209 -0
  56. package/dist/match.d.ts +209 -0
  57. package/dist/match.js +2 -0
  58. package/dist/match.js.map +1 -0
  59. package/dist/otel.cjs +2 -0
  60. package/dist/otel.cjs.map +1 -0
  61. package/dist/otel.d.cts +185 -0
  62. package/dist/otel.d.ts +185 -0
  63. package/dist/otel.js +2 -0
  64. package/dist/otel.js.map +1 -0
  65. package/dist/persistence.cjs +2 -0
  66. package/dist/persistence.cjs.map +1 -0
  67. package/dist/persistence.d.cts +572 -0
  68. package/dist/persistence.d.ts +572 -0
  69. package/dist/persistence.js +2 -0
  70. package/dist/persistence.js.map +1 -0
  71. package/dist/policies.cjs +2 -0
  72. package/dist/policies.cjs.map +1 -0
  73. package/dist/policies.d.cts +378 -0
  74. package/dist/policies.d.ts +378 -0
  75. package/dist/policies.js +2 -0
  76. package/dist/policies.js.map +1 -0
  77. package/dist/ratelimit.cjs +2 -0
  78. package/dist/ratelimit.cjs.map +1 -0
  79. package/dist/ratelimit.d.cts +279 -0
  80. package/dist/ratelimit.d.ts +279 -0
  81. package/dist/ratelimit.js +2 -0
  82. package/dist/ratelimit.js.map +1 -0
  83. package/dist/reliability.cjs +2 -0
  84. package/dist/reliability.cjs.map +1 -0
  85. package/dist/reliability.d.cts +5 -0
  86. package/dist/reliability.d.ts +5 -0
  87. package/dist/reliability.js +2 -0
  88. package/dist/reliability.js.map +1 -0
  89. package/dist/resource.cjs +2 -0
  90. package/dist/resource.cjs.map +1 -0
  91. package/dist/resource.d.cts +171 -0
  92. package/dist/resource.d.ts +171 -0
  93. package/dist/resource.js +2 -0
  94. package/dist/resource.js.map +1 -0
  95. package/dist/retry.cjs +2 -0
  96. package/dist/retry.cjs.map +1 -0
  97. package/dist/retry.d.cts +2 -0
  98. package/dist/retry.d.ts +2 -0
  99. package/dist/retry.js +2 -0
  100. package/dist/retry.js.map +1 -0
  101. package/dist/saga.cjs +2 -0
  102. package/dist/saga.cjs.map +1 -0
  103. package/dist/saga.d.cts +231 -0
  104. package/dist/saga.d.ts +231 -0
  105. package/dist/saga.js +2 -0
  106. package/dist/saga.js.map +1 -0
  107. package/dist/schedule.cjs +2 -0
  108. package/dist/schedule.cjs.map +1 -0
  109. package/dist/schedule.d.cts +387 -0
  110. package/dist/schedule.d.ts +387 -0
  111. package/dist/schedule.js +2 -0
  112. package/dist/schedule.js.map +1 -0
  113. package/dist/tagged-error.cjs +2 -0
  114. package/dist/tagged-error.cjs.map +1 -0
  115. package/dist/tagged-error.d.cts +252 -0
  116. package/dist/tagged-error.d.ts +252 -0
  117. package/dist/tagged-error.js +2 -0
  118. package/dist/tagged-error.js.map +1 -0
  119. package/dist/testing.cjs +2 -0
  120. package/dist/testing.cjs.map +1 -0
  121. package/dist/testing.d.cts +228 -0
  122. package/dist/testing.d.ts +228 -0
  123. package/dist/testing.js +2 -0
  124. package/dist/testing.js.map +1 -0
  125. package/dist/visualize.cjs +1573 -0
  126. package/dist/visualize.cjs.map +1 -0
  127. package/dist/visualize.d.cts +1415 -0
  128. package/dist/visualize.d.ts +1415 -0
  129. package/dist/visualize.js +1573 -0
  130. package/dist/visualize.js.map +1 -0
  131. package/dist/webhook.cjs +2 -0
  132. package/dist/webhook.cjs.map +1 -0
  133. package/dist/webhook.d.cts +469 -0
  134. package/dist/webhook.d.ts +469 -0
  135. package/dist/webhook.js +2 -0
  136. package/dist/webhook.js.map +1 -0
  137. package/dist/workflow-entry-C6nH8ByN.d.ts +858 -0
  138. package/dist/workflow-entry-RRTlSg_4.d.cts +858 -0
  139. package/dist/workflow.cjs +2 -0
  140. package/dist/workflow.cjs.map +1 -0
  141. package/dist/workflow.d.cts +2 -0
  142. package/dist/workflow.d.ts +2 -0
  143. package/dist/workflow.js +2 -0
  144. package/dist/workflow.js.map +1 -0
  145. package/docs/advanced.md +1548 -0
  146. package/docs/api.md +513 -0
  147. package/docs/coming-from-neverthrow.md +1013 -0
  148. package/docs/match.md +417 -0
  149. package/docs/pino-logging-example.md +396 -0
  150. package/docs/policies.md +508 -0
  151. package/docs/resource-management.md +509 -0
  152. package/docs/schedule.md +467 -0
  153. package/docs/tagged-error.md +785 -0
  154. package/docs/visualization.md +430 -0
  155. package/docs/visualize-examples.md +330 -0
  156. package/package.json +227 -0
@@ -0,0 +1,572 @@
1
+ import { R as Result, af as StepFailureMeta } from './core-BuTBsR0x.js';
2
+ import { e as ResumeStateEntry, R as ResumeState, S as StepCache } from './workflow-entry-C6nH8ByN.js';
3
+
4
+ /**
5
+ * awaitly/persistence
6
+ *
7
+ * Pluggable Persistence Adapters for StepCache and ResumeState.
8
+ * Provides adapters for Redis, file system, and in-memory storage,
9
+ * plus helpers for JSON-safe serialization of causes.
10
+ */
11
+
12
+ /**
13
+ * JSON-safe representation of a Result.
14
+ */
15
+ interface SerializedResult {
16
+ ok: boolean;
17
+ value?: unknown;
18
+ error?: unknown;
19
+ cause?: SerializedCause;
20
+ }
21
+ /**
22
+ * JSON-safe representation of a cause value.
23
+ * Handles Error objects and other non-JSON-safe types.
24
+ */
25
+ interface SerializedCause {
26
+ type: "error" | "value" | "undefined";
27
+ errorName?: string;
28
+ errorMessage?: string;
29
+ errorStack?: string;
30
+ value?: unknown;
31
+ }
32
+ /**
33
+ * JSON-safe representation of StepFailureMeta.
34
+ */
35
+ interface SerializedMeta {
36
+ origin: "result" | "throw";
37
+ resultCause?: SerializedCause;
38
+ thrown?: SerializedCause;
39
+ }
40
+ /**
41
+ * JSON-safe representation of a ResumeStateEntry.
42
+ */
43
+ interface SerializedEntry {
44
+ result: SerializedResult;
45
+ meta?: SerializedMeta;
46
+ }
47
+ /**
48
+ * JSON-safe representation of ResumeState.
49
+ */
50
+ interface SerializedState {
51
+ version: number;
52
+ entries: Record<string, SerializedEntry>;
53
+ metadata?: Record<string, unknown>;
54
+ }
55
+ /**
56
+ * Serialize a cause value to a JSON-safe format.
57
+ */
58
+ declare function serializeCause(cause: unknown): SerializedCause;
59
+ /**
60
+ * Deserialize a cause value from JSON-safe format.
61
+ */
62
+ declare function deserializeCause(serialized: SerializedCause): unknown;
63
+ /**
64
+ * Serialize a Result to a JSON-safe format.
65
+ */
66
+ declare function serializeResult(result: Result<unknown, unknown, unknown>): SerializedResult;
67
+ /**
68
+ * Deserialize a Result from JSON-safe format.
69
+ */
70
+ declare function deserializeResult(serialized: SerializedResult): Result<unknown, unknown, unknown>;
71
+ /**
72
+ * Serialize StepFailureMeta to a JSON-safe format.
73
+ */
74
+ declare function serializeMeta(meta: StepFailureMeta): SerializedMeta;
75
+ /**
76
+ * Deserialize StepFailureMeta from JSON-safe format.
77
+ */
78
+ declare function deserializeMeta(serialized: SerializedMeta): StepFailureMeta;
79
+ /**
80
+ * Serialize a ResumeStateEntry to a JSON-safe format.
81
+ */
82
+ declare function serializeEntry(entry: ResumeStateEntry): SerializedEntry;
83
+ /**
84
+ * Deserialize a ResumeStateEntry from JSON-safe format.
85
+ */
86
+ declare function deserializeEntry(serialized: SerializedEntry): ResumeStateEntry;
87
+ /**
88
+ * Serialize ResumeState to a JSON-safe format.
89
+ */
90
+ declare function serializeState(state: ResumeState, metadata?: Record<string, unknown>): SerializedState;
91
+ /**
92
+ * Deserialize ResumeState from JSON-safe format.
93
+ */
94
+ declare function deserializeState(serialized: SerializedState): ResumeState;
95
+ /**
96
+ * Convert ResumeState to a JSON string.
97
+ */
98
+ declare function stringifyState(state: ResumeState, metadata?: Record<string, unknown>): string;
99
+ /**
100
+ * Parse ResumeState from a JSON string.
101
+ */
102
+ declare function parseState(json: string): ResumeState;
103
+ /**
104
+ * Options for the in-memory cache adapter.
105
+ */
106
+ interface MemoryCacheOptions {
107
+ /**
108
+ * Maximum number of entries to store.
109
+ * Oldest entries are evicted when limit is reached.
110
+ */
111
+ maxSize?: number;
112
+ /**
113
+ * Time-to-live in milliseconds.
114
+ * Entries are automatically removed after this duration.
115
+ */
116
+ ttl?: number;
117
+ }
118
+ /**
119
+ * Create an in-memory StepCache with optional LRU eviction and TTL.
120
+ *
121
+ * @param options - Cache options
122
+ * @returns StepCache implementation
123
+ *
124
+ * @example
125
+ * ```typescript
126
+ * const cache = createMemoryCache({ maxSize: 1000, ttl: 60000 });
127
+ * const workflow = createWorkflow(deps, { cache });
128
+ * ```
129
+ */
130
+ declare function createMemoryCache(options?: MemoryCacheOptions): StepCache;
131
+ /**
132
+ * Options for the file system cache adapter.
133
+ */
134
+ interface FileCacheOptions {
135
+ /**
136
+ * Directory to store cache files.
137
+ */
138
+ directory: string;
139
+ /**
140
+ * File extension for cache files.
141
+ * @default '.json'
142
+ */
143
+ extension?: string;
144
+ /**
145
+ * Custom file system interface (for testing or custom implementations).
146
+ */
147
+ fs?: FileSystemInterface;
148
+ }
149
+ /**
150
+ * Minimal file system interface for cache operations.
151
+ */
152
+ interface FileSystemInterface {
153
+ readFile(path: string): Promise<string>;
154
+ writeFile(path: string, data: string): Promise<void>;
155
+ unlink(path: string): Promise<void>;
156
+ exists(path: string): Promise<boolean>;
157
+ readdir(path: string): Promise<string[]>;
158
+ mkdir(path: string, options?: {
159
+ recursive?: boolean;
160
+ }): Promise<void>;
161
+ }
162
+ /**
163
+ * Create a file system-based StepCache.
164
+ * Each step result is stored as a separate JSON file.
165
+ *
166
+ * @param options - Cache options
167
+ * @returns StepCache implementation (async operations wrapped in sync interface)
168
+ *
169
+ * @example
170
+ * ```typescript
171
+ * import * as fs from 'fs/promises';
172
+ *
173
+ * const cache = createFileCache({
174
+ * directory: './workflow-cache',
175
+ * fs: {
176
+ * readFile: (path) => fs.readFile(path, 'utf-8'),
177
+ * writeFile: (path, data) => fs.writeFile(path, data, 'utf-8'),
178
+ * unlink: fs.unlink,
179
+ * exists: async (path) => fs.access(path).then(() => true).catch(() => false),
180
+ * readdir: fs.readdir,
181
+ * mkdir: fs.mkdir,
182
+ * },
183
+ * });
184
+ * ```
185
+ */
186
+ declare function createFileCache(options: FileCacheOptions): StepCache & {
187
+ /** Initialize the cache directory. Call before using the cache. */
188
+ init(): Promise<void>;
189
+ /** Get a result asynchronously. */
190
+ getAsync(key: string): Promise<Result<unknown, unknown, unknown> | undefined>;
191
+ /** Set a result asynchronously. */
192
+ setAsync(key: string, result: Result<unknown, unknown, unknown>): Promise<void>;
193
+ /** Delete a result asynchronously. */
194
+ deleteAsync(key: string): Promise<boolean>;
195
+ /** Clear all results asynchronously. */
196
+ clearAsync(): Promise<void>;
197
+ };
198
+ /**
199
+ * Generic key-value store interface.
200
+ * Implement this for Redis, DynamoDB, etc.
201
+ */
202
+ interface KeyValueStore {
203
+ get(key: string): Promise<string | null>;
204
+ set(key: string, value: string, options?: {
205
+ ttl?: number;
206
+ }): Promise<void>;
207
+ delete(key: string): Promise<boolean>;
208
+ exists(key: string): Promise<boolean>;
209
+ keys(pattern: string): Promise<string[]>;
210
+ }
211
+ /**
212
+ * Options for key-value store cache adapter.
213
+ */
214
+ interface KVCacheOptions {
215
+ /**
216
+ * Key-value store implementation.
217
+ */
218
+ store: KeyValueStore;
219
+ /**
220
+ * Key prefix for all cache entries.
221
+ * @default 'workflow:'
222
+ */
223
+ prefix?: string;
224
+ /**
225
+ * Time-to-live in seconds for cache entries.
226
+ */
227
+ ttl?: number;
228
+ }
229
+ /**
230
+ * Create a StepCache backed by a key-value store (Redis, DynamoDB, etc.).
231
+ *
232
+ * @param options - Cache options
233
+ * @returns StepCache implementation with async methods
234
+ *
235
+ * @example
236
+ * ```typescript
237
+ * // With Redis
238
+ * import { createClient } from 'redis';
239
+ *
240
+ * const redis = createClient();
241
+ * await redis.connect();
242
+ *
243
+ * const cache = createKVCache({
244
+ * store: {
245
+ * get: (key) => redis.get(key),
246
+ * set: (key, value, opts) => redis.set(key, value, { EX: opts?.ttl }),
247
+ * delete: (key) => redis.del(key).then(n => n > 0),
248
+ * exists: (key) => redis.exists(key).then(n => n > 0),
249
+ * keys: (pattern) => redis.keys(pattern),
250
+ * },
251
+ * prefix: 'myapp:workflow:',
252
+ * ttl: 3600, // 1 hour
253
+ * });
254
+ * ```
255
+ */
256
+ declare function createKVCache(options: KVCacheOptions): StepCache & {
257
+ /** Get a result asynchronously. */
258
+ getAsync(key: string): Promise<Result<unknown, unknown, unknown> | undefined>;
259
+ /** Set a result asynchronously. */
260
+ setAsync(key: string, result: Result<unknown, unknown, unknown>): Promise<void>;
261
+ /** Check if key exists asynchronously. */
262
+ hasAsync(key: string): Promise<boolean>;
263
+ /** Delete a result asynchronously. */
264
+ deleteAsync(key: string): Promise<boolean>;
265
+ /** Clear all results asynchronously. */
266
+ clearAsync(): Promise<void>;
267
+ };
268
+ /**
269
+ * Interface for persisting workflow state.
270
+ */
271
+ interface StatePersistence {
272
+ /**
273
+ * Save workflow state.
274
+ */
275
+ save(runId: string, state: ResumeState, metadata?: Record<string, unknown>): Promise<void>;
276
+ /**
277
+ * Load workflow state.
278
+ */
279
+ load(runId: string): Promise<ResumeState | undefined>;
280
+ /**
281
+ * Delete workflow state.
282
+ */
283
+ delete(runId: string): Promise<boolean>;
284
+ /**
285
+ * List all saved workflow IDs.
286
+ */
287
+ list(): Promise<string[]>;
288
+ }
289
+ /**
290
+ * Create a state persistence adapter using a key-value store.
291
+ *
292
+ * @param store - Key-value store implementation
293
+ * @param prefix - Key prefix for state entries
294
+ * @returns StatePersistence implementation
295
+ */
296
+ declare function createStatePersistence(store: KeyValueStore, prefix?: string): StatePersistence;
297
+ /**
298
+ * Create a cache that hydrates from persistent storage on first access.
299
+ *
300
+ * @param memoryCache - In-memory cache for fast access
301
+ * @param persistence - Persistent storage for durability
302
+ * @returns Hydrating cache implementation
303
+ */
304
+ declare function createHydratingCache(memoryCache: StepCache, persistence: StatePersistence, runId: string): StepCache & {
305
+ hydrate(): Promise<void>;
306
+ };
307
+
308
+ /**
309
+ * Workflow Versioning and Migration
310
+ *
311
+ * Handle schema changes when resuming workflows that were persisted
312
+ * with older step shapes.
313
+ *
314
+ * @example
315
+ * ```typescript
316
+ * import { createVersionedWorkflow } from 'awaitly';
317
+ *
318
+ * const workflow = createVersionedWorkflow(
319
+ * { fetchUser, chargeCard },
320
+ * {
321
+ * version: 2,
322
+ * migrations: {
323
+ * 1: (state) => migrateV1ToV2(state),
324
+ * },
325
+ * resumeState: loadState(runId),
326
+ * }
327
+ * );
328
+ * ```
329
+ */
330
+
331
+ /**
332
+ * Version number type.
333
+ */
334
+ type Version = number;
335
+ /**
336
+ * Migration function that transforms state from one version to the next.
337
+ */
338
+ type MigrationFn = (state: ResumeState) => ResumeState | Promise<ResumeState>;
339
+ /**
340
+ * Map of migrations keyed by the source version.
341
+ * Migration at key N transforms state from version N to version N+1.
342
+ */
343
+ type Migrations = Record<Version, MigrationFn>;
344
+ /**
345
+ * Versioned state includes the version number.
346
+ */
347
+ interface VersionedState {
348
+ version: Version;
349
+ state: ResumeState;
350
+ }
351
+ /**
352
+ * Configuration for versioned workflow.
353
+ */
354
+ interface VersionedWorkflowConfig {
355
+ /**
356
+ * Current workflow version.
357
+ */
358
+ version: Version;
359
+ /**
360
+ * Migrations for upgrading old states.
361
+ * Key is the source version, value transforms to next version.
362
+ */
363
+ migrations?: Migrations;
364
+ /**
365
+ * Strict mode - fail if state version is higher than current.
366
+ * @default true
367
+ */
368
+ strictVersioning?: boolean;
369
+ }
370
+ /**
371
+ * Error when version migration fails.
372
+ */
373
+ interface MigrationError {
374
+ type: "MIGRATION_ERROR";
375
+ fromVersion: Version;
376
+ toVersion: Version;
377
+ cause: unknown;
378
+ }
379
+ /**
380
+ * Error when state version is incompatible.
381
+ */
382
+ interface VersionIncompatibleError {
383
+ type: "VERSION_INCOMPATIBLE";
384
+ stateVersion: Version;
385
+ currentVersion: Version;
386
+ reason: string;
387
+ }
388
+ /**
389
+ * Type guard for MigrationError.
390
+ */
391
+ declare function isMigrationError(error: unknown): error is MigrationError;
392
+ /**
393
+ * Type guard for VersionIncompatibleError.
394
+ */
395
+ declare function isVersionIncompatibleError(error: unknown): error is VersionIncompatibleError;
396
+ /**
397
+ * Migrate state from one version to another.
398
+ *
399
+ * @param state - The versioned state to migrate
400
+ * @param targetVersion - The target version
401
+ * @param migrations - Migration functions
402
+ * @returns The migrated state or an error
403
+ *
404
+ * @example
405
+ * ```typescript
406
+ * const migrated = await migrateState(
407
+ * { version: 1, state: oldState },
408
+ * 3,
409
+ * {
410
+ * 1: (s) => transformV1ToV2(s),
411
+ * 2: (s) => transformV2ToV3(s),
412
+ * }
413
+ * );
414
+ * ```
415
+ */
416
+ declare function migrateState(state: VersionedState, targetVersion: Version, migrations: Migrations): Promise<Result<VersionedState, MigrationError | VersionIncompatibleError>>;
417
+ /**
418
+ * Create a versioned resume state loader.
419
+ *
420
+ * This wraps a state loader to automatically apply migrations
421
+ * when loading older state versions.
422
+ *
423
+ * @param config - Versioning configuration
424
+ * @returns A function that loads and migrates state
425
+ *
426
+ * @example
427
+ * ```typescript
428
+ * const loadVersionedState = createVersionedStateLoader({
429
+ * version: 3,
430
+ * migrations: {
431
+ * 1: migrateV1ToV2,
432
+ * 2: migrateV2ToV3,
433
+ * },
434
+ * });
435
+ *
436
+ * // In workflow
437
+ * const workflow = createWorkflow(deps, {
438
+ * resumeState: () => loadVersionedState(savedState),
439
+ * });
440
+ * ```
441
+ */
442
+ declare function createVersionedStateLoader(config: VersionedWorkflowConfig): (versionedState: VersionedState | null | undefined) => Promise<Result<ResumeState | undefined, MigrationError | VersionIncompatibleError>>;
443
+ /**
444
+ * Create versioned state from current resume state.
445
+ *
446
+ * Use this when saving state to storage.
447
+ *
448
+ * @param state - The current resume state
449
+ * @param version - The current workflow version
450
+ * @returns A versioned state object
451
+ *
452
+ * @example
453
+ * ```typescript
454
+ * const collector = createStepCollector();
455
+ * // ... run workflow ...
456
+ *
457
+ * const versionedState = createVersionedState(collector.getState(), 2);
458
+ * await db.saveWorkflowState(workflowId, versionedState);
459
+ * ```
460
+ */
461
+ declare function createVersionedState(state: ResumeState, version: Version): VersionedState;
462
+ /**
463
+ * Parse versioned state from JSON.
464
+ *
465
+ * Handles the serialization/deserialization of ResumeState with Map.
466
+ *
467
+ * @param json - The JSON string or parsed object
468
+ * @returns The versioned state or null if invalid
469
+ *
470
+ * @example
471
+ * ```typescript
472
+ * const json = await db.loadWorkflowState(workflowId);
473
+ * const versionedState = parseVersionedState(json);
474
+ * if (versionedState) {
475
+ * const loader = createVersionedStateLoader(config);
476
+ * const state = await loader(versionedState);
477
+ * }
478
+ * ```
479
+ */
480
+ interface SerializedVersionedState {
481
+ version: number;
482
+ state: {
483
+ steps: Array<[string, ResumeStateEntry]>;
484
+ };
485
+ }
486
+ declare function parseVersionedState(json: string | SerializedVersionedState | null | undefined): VersionedState | null;
487
+ /**
488
+ * Serialize versioned state to JSON.
489
+ *
490
+ * Converts the Map to an array for JSON serialization.
491
+ *
492
+ * @param state - The versioned state
493
+ * @returns JSON string
494
+ *
495
+ * @example
496
+ * ```typescript
497
+ * const json = stringifyVersionedState(versionedState);
498
+ * await db.saveWorkflowState(workflowId, json);
499
+ * ```
500
+ */
501
+ declare function stringifyVersionedState(state: VersionedState): string;
502
+ /**
503
+ * Create a migration that renames step keys.
504
+ *
505
+ * @param renames - Map of old key to new key
506
+ * @returns A migration function
507
+ *
508
+ * @example
509
+ * ```typescript
510
+ * const migrations = {
511
+ * 1: createKeyRenameMigration({
512
+ * 'user:fetch': 'user:load',
513
+ * 'order:create': 'order:submit',
514
+ * }),
515
+ * };
516
+ * ```
517
+ */
518
+ declare function createKeyRenameMigration(renames: Record<string, string>): MigrationFn;
519
+ /**
520
+ * Create a migration that removes specific step keys.
521
+ *
522
+ * @param keysToRemove - Array of keys to remove
523
+ * @returns A migration function
524
+ *
525
+ * @example
526
+ * ```typescript
527
+ * const migrations = {
528
+ * 1: createKeyRemoveMigration(['deprecated:step', 'old:cache']),
529
+ * };
530
+ * ```
531
+ */
532
+ declare function createKeyRemoveMigration(keysToRemove: string[]): MigrationFn;
533
+ /**
534
+ * Create a migration that transforms step values.
535
+ *
536
+ * @param transforms - Map of key to transform function
537
+ * @returns A migration function
538
+ *
539
+ * @example
540
+ * ```typescript
541
+ * const migrations = {
542
+ * 1: createValueTransformMigration({
543
+ * 'user:fetch': (entry) => ({
544
+ * ...entry,
545
+ * result: entry.result.ok
546
+ * ? ok({ ...entry.result.value, newField: 'default' })
547
+ * : entry.result,
548
+ * }),
549
+ * }),
550
+ * };
551
+ * ```
552
+ */
553
+ declare function createValueTransformMigration(transforms: Record<string, (entry: ResumeStateEntry) => ResumeStateEntry>): MigrationFn;
554
+ /**
555
+ * Compose multiple migrations into a single migration.
556
+ *
557
+ * @param migrations - Array of migration functions
558
+ * @returns A single migration function that applies all migrations in order
559
+ *
560
+ * @example
561
+ * ```typescript
562
+ * const migrations = {
563
+ * 1: composeMigrations([
564
+ * createKeyRenameMigration({ 'old': 'new' }),
565
+ * createKeyRemoveMigration(['deprecated']),
566
+ * ]),
567
+ * };
568
+ * ```
569
+ */
570
+ declare function composeMigrations(migrations: MigrationFn[]): MigrationFn;
571
+
572
+ export { type FileCacheOptions, type FileSystemInterface, type KVCacheOptions, type KeyValueStore, type MemoryCacheOptions, type MigrationError, type MigrationFn, type Migrations, type SerializedCause, type SerializedEntry, type SerializedMeta, type SerializedResult, type SerializedState, type StatePersistence, type Version, type VersionIncompatibleError, type VersionedState, type VersionedWorkflowConfig, composeMigrations, createFileCache, createHydratingCache, createKVCache, createKeyRemoveMigration, createKeyRenameMigration, createMemoryCache, createStatePersistence, createValueTransformMigration, createVersionedState, createVersionedStateLoader, deserializeCause, deserializeEntry, deserializeMeta, deserializeResult, deserializeState, isMigrationError, isVersionIncompatibleError, migrateState, parseState, parseVersionedState, serializeCause, serializeEntry, serializeMeta, serializeResult, serializeState, stringifyState, stringifyVersionedState };
@@ -0,0 +1,2 @@
1
+ var I=e=>({ok:!0,value:e}),h=(e,n)=>({ok:!1,error:e,...n?.cause!==void 0?{cause:n.cause}:{}});var de=e=>typeof e=="object"&&e!==null&&e.type==="UNEXPECTED_ERROR",N=Symbol.for("step_timeout_marker");function re(e){return typeof e!="object"||e===null?!1:e.type==="STEP_TIMEOUT"?!0:N in e}function we(e){if(!(typeof e!="object"||e===null)){if(e.type==="STEP_TIMEOUT"){let n=e;return{timeoutMs:n.timeoutMs,stepName:n.stepName,stepKey:n.stepKey,attempt:n.attempt}}if(N in e)return e[N]}}var ue=Symbol("early-exit");function fe(e,n){return{[ue]:!0,error:e,meta:n}}function ke(e){return typeof e=="object"&&e!==null&&e[ue]===!0}var ie=Symbol("mapper-exception");function Te(e){return{[ie]:!0,thrown:e}}function Re(e){return typeof e=="object"&&e!==null&&e[ie]===!0}function ge(e){return typeof e=="string"?{name:e}:e??{}}function Z(e,n){let{backoff:u,initialDelay:o,maxDelay:s,jitter:i}=n,r;switch(u){case"fixed":r=o;break;case"linear":r=o*e;break;case"exponential":r=o*Math.pow(2,e-1);break}if(r=Math.min(r,s),i){let t=r*.25*Math.random();r=r+t}return Math.floor(r)}function Q(e){return new Promise(n=>setTimeout(n,e))}var oe=Symbol("timeout");async function Ce(e,n,u){let o=new AbortController,s=n.error??{type:"STEP_TIMEOUT",stepName:u.name,stepKey:u.key,timeoutMs:n.ms,attempt:u.attempt},i,r=new Promise((k,w)=>{i=setTimeout(()=>{o.abort(),w({[oe]:!0,error:s})},n.ms)}),t;n.signal?t=Promise.resolve(e(o.signal)):t=Promise.resolve(e());try{return await Promise.race([t,r])}catch(k){if(typeof k=="object"&&k!==null&&k[oe]===!0){let w=k.error;if(typeof w=="object"&&w!==null&&w.type!=="STEP_TIMEOUT"){let _={timeoutMs:n.ms,stepName:u.name,stepKey:u.key,attempt:u.attempt};N in w?w[N]=_:Object.defineProperty(w,N,{value:_,enumerable:!1,writable:!0,configurable:!1})}throw w}throw k}finally{clearTimeout(i)}}var F={backoff:"exponential",initialDelay:100,maxDelay:3e4,jitter:!0,retryOn:()=>!0,onRetry:()=>{}};async function 
se(e,n){let{onError:u,onEvent:o,catchUnexpected:s,workflowId:i,context:r}=n&&typeof n=="object"?n:{},t=i??crypto.randomUUID(),k=!u&&!s,w=[],_=0,B=l=>l??`step_${++_}`,d=l=>{let P=l.context!==void 0||r===void 0?l:{...l,context:r};if(P.type==="step_success"){let O=P.stepId;for(let K=w.length-1;K>=0;K--){let z=w[K];if(z.type==="race"&&!z.winnerId){z.winnerId=O;break}}}o?.(P,r)},V=fe,te=l=>ke(l),Y=(l,P)=>k?P?.origin==="result"?{type:"UNEXPECTED_ERROR",cause:{type:"STEP_FAILURE",origin:"result",error:l,...P.resultCause!==void 0?{cause:P.resultCause}:{}}}:P?.origin==="throw"?{type:"UNEXPECTED_ERROR",cause:{type:"STEP_FAILURE",origin:"throw",error:l,thrown:P.thrown}}:{type:"UNEXPECTED_ERROR",cause:{type:"STEP_FAILURE",origin:"result",error:l}}:l,Ee=l=>({type:"UNEXPECTED_ERROR",cause:l.meta.origin==="result"?{type:"STEP_FAILURE",origin:"result",error:l.error,...l.meta.resultCause!==void 0?{cause:l.meta.resultCause}:{}}:{type:"STEP_FAILURE",origin:"throw",error:l.error,thrown:l.meta.thrown}});try{let P=function(T,y){let p=`scope_${Date.now()}_${Math.random().toString(36).slice(2,8)}`;return(async()=>{let a=performance.now(),c=!1;w.push({scopeId:p,type:"parallel"});let S=()=>{if(c)return;c=!0;let m=w.findIndex(E=>E.scopeId===p);m!==-1&&w.splice(m,1),d({type:"scope_end",workflowId:t,scopeId:p,ts:Date.now(),durationMs:performance.now()-a})};d({type:"scope_start",workflowId:t,scopeId:p,scopeType:"parallel",name:T,ts:Date.now()});try{let m=await y();if(S(),!m.ok)throw u?.(m.error,T,r),V(m.error,{origin:"result",resultCause:m.cause});return m.value}catch(m){throw S(),m}})()},O=function(T,y){let p=Object.keys(T),a=y.name??`Parallel(${p.join(", ")})`,c=`scope_${Date.now()}_${Math.random().toString(36).slice(2,8)}`;return(async()=>{let S=performance.now(),m=!1;w.push({scopeId:c,type:"parallel"});let E=()=>{if(m)return;m=!0;let 
R=w.findIndex(C=>C.scopeId===c);R!==-1&&w.splice(R,1),d({type:"scope_end",workflowId:t,scopeId:c,ts:Date.now(),durationMs:performance.now()-S})};d({type:"scope_start",workflowId:t,scopeId:c,scopeType:"parallel",name:a,ts:Date.now()});try{let R=await new Promise(A=>{if(p.length===0){A([]);return}let b=!1,x=p.length,j=new Array(p.length);for(let M=0;M<p.length;M++){let D=p[M],G=M;Promise.resolve(T[D]()).catch(g=>h({type:"PROMISE_REJECTED",cause:g},{cause:{type:"PROMISE_REJECTION",reason:g}})).then(g=>{if(!b){if(!g.ok){b=!0,A([{key:D,result:g}]);return}j[G]={key:D,result:g},x--,x===0&&A(j)}})}});E();let C={};for(let{key:A,result:b}of R){if(!b.ok)throw u?.(b.error,A,r),V(b.error,{origin:"result",resultCause:b.cause});C[A]=b.value}return C}catch(R){throw E(),R}})()};var ze=P,je=O;let l=(T,y)=>(async()=>{let p=ge(y),{name:a,key:c,retry:S,timeout:m}=p,E=B(c),R=o,C=R?performance.now():0;if(!(typeof T=="function")){if(S&&S.attempts>1)throw new Error("step: retry options require a function operation. Direct Promise/Result values cannot be re-executed on retry. Wrap your operation in a function: step(() => yourOperation, { retry: {...} })");if(m)throw new Error("step: timeout options require a function operation. Direct Promise/Result values cannot be wrapped with timeout after they've started. 
Wrap your operation in a function: step(() => yourOperation, { timeout: {...} })")}let x={attempts:Math.max(1,S?.attempts??1),backoff:S?.backoff??F.backoff,initialDelay:S?.initialDelay??F.initialDelay,maxDelay:S?.maxDelay??F.maxDelay,jitter:S?.jitter??F.jitter,retryOn:S?.retryOn??F.retryOn,onRetry:S?.onRetry??F.onRetry};o&&d({type:"step_start",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now()});let j;for(let g=1;g<=x.attempts;g++){let me=R?performance.now():0;try{let f;if(typeof T=="function"?m?f=await Ce(T,m,{name:a,key:c,attempt:g}):f=await T():f=await T,f.ok){let U=performance.now()-C;return d({type:"step_success",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:U}),c&&d({type:"step_complete",workflowId:t,stepKey:c,name:a,ts:Date.now(),durationMs:U,result:f}),f.value}if(j=f,g<x.attempts&&x.retryOn(f.error,g)){let U=Z(g,x);d({type:"step_retry",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),attempt:g+1,maxAttempts:x.attempts,delayMs:U,error:f.error}),x.onRetry(f.error,g,U),await Q(U);continue}x.attempts>1&&d({type:"step_retries_exhausted",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:performance.now()-C,attempts:g,lastError:f.error});break}catch(f){let U=performance.now()-me;if(te(f))throw d({type:"step_aborted",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:U}),f;if(re(f)){let v=we(f),q=m?.ms??v?.timeoutMs??0;if(d({type:"step_timeout",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),timeoutMs:q,attempt:g}),g<x.attempts&&x.retryOn(f,g)){let H=Z(g,x);d({type:"step_retry",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),attempt:g+1,maxAttempts:x.attempts,delayMs:H,error:f}),x.onRetry(f,g,H),await Q(H);continue}x.attempts>1&&d({type:"step_retries_exhausted",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:performance.now()-C,attempts:g,lastError:f})}if(g<x.attempts&&x.retryOn(f,g)){let 
v=Z(g,x);d({type:"step_retry",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),attempt:g+1,maxAttempts:x.attempts,delayMs:v,error:f}),x.onRetry(f,g,v),await Q(v);continue}x.attempts>1&&!re(f)&&d({type:"step_retries_exhausted",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:performance.now()-C,attempts:g,lastError:f});let L=performance.now()-C;if(s){let v;try{v=s(f)}catch(q){throw Te(q)}throw d({type:"step_error",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:L,error:v}),c&&d({type:"step_complete",workflowId:t,stepKey:c,name:a,ts:Date.now(),durationMs:L,result:h(v,{cause:f}),meta:{origin:"throw",thrown:f}}),u?.(v,a,r),V(v,{origin:"throw",thrown:f})}else{let v={type:"UNEXPECTED_ERROR",cause:{type:"UNCAUGHT_EXCEPTION",thrown:f}};throw d({type:"step_error",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:L,error:v}),c&&d({type:"step_complete",workflowId:t,stepKey:c,name:a,ts:Date.now(),durationMs:L,result:h(v,{cause:f}),meta:{origin:"throw",thrown:f}}),f}}}let M=j,D=performance.now()-C,G=Y(M.error,{origin:"result",resultCause:M.cause});throw d({type:"step_error",workflowId:t,stepId:E,stepKey:c,name:a,ts:Date.now(),durationMs:D,error:G}),c&&d({type:"step_complete",workflowId:t,stepKey:c,name:a,ts:Date.now(),durationMs:D,result:M,meta:{origin:"result",resultCause:M.cause}}),u?.(M.error,a,r),V(M.error,{origin:"result",resultCause:M.cause})})();l.try=(T,y)=>{let p=y.name,a=y.key,c=B(a),S="error"in y?()=>y.error:y.onError,m=o;return(async()=>{let E=m?performance.now():0;o&&d({type:"step_start",workflowId:t,stepId:c,stepKey:a,name:p,ts:Date.now()});try{let R=await T(),C=performance.now()-E;return d({type:"step_success",workflowId:t,stepId:c,stepKey:a,name:p,ts:Date.now(),durationMs:C}),a&&d({type:"step_complete",workflowId:t,stepKey:a,name:p,ts:Date.now(),durationMs:C,result:I(R)}),R}catch(R){let C=S(R),A=performance.now()-E,b=Y(C,{origin:"throw",thrown:R});throw 
d({type:"step_error",workflowId:t,stepId:c,stepKey:a,name:p,ts:Date.now(),durationMs:A,error:b}),a&&d({type:"step_complete",workflowId:t,stepKey:a,name:p,ts:Date.now(),durationMs:A,result:h(C,{cause:R}),meta:{origin:"throw",thrown:R}}),u?.(C,p,r),V(C,{origin:"throw",thrown:R})}})()},l.fromResult=(T,y)=>{let p=y.name,a=y.key,c=B(a),S="error"in y?()=>y.error:y.onError,m=o;return(async()=>{let E=m?performance.now():0;o&&d({type:"step_start",workflowId:t,stepId:c,stepKey:a,name:p,ts:Date.now()});let R=await T();if(R.ok){let C=performance.now()-E;return d({type:"step_success",workflowId:t,stepId:c,stepKey:a,name:p,ts:Date.now(),durationMs:C}),a&&d({type:"step_complete",workflowId:t,stepKey:a,name:p,ts:Date.now(),durationMs:C,result:I(R.value)}),R.value}else{let C=S(R.error),A=performance.now()-E,b=Y(C,{origin:"result",resultCause:R.error});throw d({type:"step_error",workflowId:t,stepId:c,stepKey:a,name:p,ts:Date.now(),durationMs:A,error:b}),a&&d({type:"step_complete",workflowId:t,stepKey:a,name:p,ts:Date.now(),durationMs:A,result:h(C,{cause:R.error}),meta:{origin:"result",resultCause:R.error}}),u?.(C,p,r),V(C,{origin:"result",resultCause:R.error})}})()},l.retry=(T,y)=>l(T,{name:y.name,key:y.key,retry:{attempts:y.attempts,backoff:y.backoff,initialDelay:y.initialDelay,maxDelay:y.maxDelay,jitter:y.jitter,retryOn:y.retryOn,onRetry:y.onRetry},timeout:y.timeout}),l.withTimeout=(T,y)=>l(T,{name:y.name,key:y.key,timeout:y}),l.parallel=((...T)=>{if(typeof T[0]=="string"){let y=T[0],p=T[1];return P(y,p)}else{let y=T[0],p=T[1]??{};return O(y,p)}}),l.race=(T,y)=>{let p=`scope_${Date.now()}_${Math.random().toString(36).slice(2,8)}`;return(async()=>{let a=performance.now(),c=!1,S={scopeId:p,type:"race",winnerId:void 0};w.push(S);let m=()=>{if(c)return;c=!0;let 
E=w.findIndex(R=>R.scopeId===p);E!==-1&&w.splice(E,1),d({type:"scope_end",workflowId:t,scopeId:p,ts:Date.now(),durationMs:performance.now()-a,winnerId:S.winnerId})};d({type:"scope_start",workflowId:t,scopeId:p,scopeType:"race",name:T,ts:Date.now()});try{let E=await y();if(m(),!E.ok)throw u?.(E.error,T,r),V(E.error,{origin:"result",resultCause:E.cause});return E.value}catch(E){throw m(),E}})()},l.allSettled=(T,y)=>{let p=`scope_${Date.now()}_${Math.random().toString(36).slice(2,8)}`;return(async()=>{let a=performance.now(),c=!1;w.push({scopeId:p,type:"allSettled"});let S=()=>{if(c)return;c=!0;let m=w.findIndex(E=>E.scopeId===p);m!==-1&&w.splice(m,1),d({type:"scope_end",workflowId:t,scopeId:p,ts:Date.now(),durationMs:performance.now()-a})};d({type:"scope_start",workflowId:t,scopeId:p,scopeType:"allSettled",name:T,ts:Date.now()});try{let m=await y();if(S(),!m.ok)throw u?.(m.error,T,r),V(m.error,{origin:"result",resultCause:m.cause});return m.value}catch(m){throw S(),m}})()};let z=await e(l);return I(z)}catch(l){if(Re(l))throw l.thrown;if(te(l)){let O=l.meta.origin==="throw"?l.meta.thrown:l.meta.resultCause;if(s||u)return h(l.error,{cause:O});if(de(l.error))return h(l.error,{cause:O});let K=Ee(l);return h(K,{cause:O})}if(s){let O=s(l);return u?.(O,"unexpected",r),h(O,{cause:l})}let P={type:"UNEXPECTED_ERROR",cause:{type:"UNCAUGHT_EXCEPTION",thrown:l}};return u?.(P,"unexpected",r),h(P,{cause:l})}}se.strict=(e,n)=>se(e,n);function $(e){if(e===void 0)return{type:"undefined"};if(e instanceof Error)return{type:"error",errorName:e.name,errorMessage:e.message,errorStack:e.stack};try{return JSON.stringify(e),{type:"value",value:e}}catch{return{type:"value",value:String(e)}}}function W(e){if(e.type!=="undefined"){if(e.type==="error"){let n=new Error(e.errorMessage??"Unknown error");return n.name=e.errorName??"Error",e.errorStack&&(n.stack=e.errorStack),n}return e.value}}function J(e){return e.ok?{ok:!0,value:e.value}:{ok:!1,error:e.error,cause:e.cause!==void 0?$(e.cause):void 
0}}function X(e){if(e.ok)return I(e.value);let n=e.cause?W(e.cause):void 0;return h(e.error,n!==void 0?{cause:n}:void 0)}function ae(e){return e.origin==="result"?{origin:"result",resultCause:e.resultCause!==void 0?$(e.resultCause):void 0}:{origin:"throw",thrown:$(e.thrown)}}function le(e){return e.origin==="result"?{origin:"result",resultCause:e.resultCause?W(e.resultCause):void 0}:{origin:"throw",thrown:e.thrown?W(e.thrown):void 0}}function ce(e){return{result:J(e.result),meta:e.meta?ae(e.meta):void 0}}function pe(e){return{result:X(e.result),meta:e.meta?le(e.meta):void 0}}function ee(e,n){let u={};for(let[o,s]of e.steps)u[o]=ce(s);return{version:1,entries:u,metadata:n}}function ne(e){let n=new Map;for(let[u,o]of Object.entries(e.entries))n.set(u,pe(o));return{steps:n}}function Se(e,n){return JSON.stringify(ee(e,n))}function xe(e){let n=JSON.parse(e);return ne(n)}function he(e={}){let{maxSize:n,ttl:u}=e,o=new Map,s=t=>u?Date.now()-t>u:!1,i=()=>{if(u)for(let[t,k]of o)s(k.timestamp)&&o.delete(t)},r=()=>{if(!n||o.size<n)return;let t,k=1/0;for(let[w,_]of o)_.timestamp<k&&(k=_.timestamp,t=w);t&&o.delete(t)};return{get(t){i();let k=o.get(t);if(k){if(s(k.timestamp)){o.delete(t);return}return k.result}},set(t,k){i(),r(),o.set(t,{result:k,timestamp:Date.now()})},has(t){i();let k=o.get(t);return k?s(k.timestamp)?(o.delete(t),!1):!0:!1},delete(t){return o.delete(t)},clear(){o.clear()}}}function Pe(e){let{directory:n,extension:u=".json",fs:o}=e;if(!o)throw new Error("File system interface is required. 
Pass fs option with readFile, writeFile, etc.");let s=r=>{let t=r.replace(/[^a-zA-Z0-9_-]/g,"_");return`${n}/${t}${u}`},i=new Map;return{async init(){await o.mkdir(n,{recursive:!0})},get(r){return i.get(r)},async getAsync(r){let t=s(r);try{if(!await o.exists(t))return;let k=await o.readFile(t),w=JSON.parse(k),_=X(w);return i.set(r,_),_}catch{return}},set(r,t){i.set(r,t)},async setAsync(r,t){let k=s(r),w=J(t);await o.writeFile(k,JSON.stringify(w,null,2)),i.set(r,t)},has(r){return i.has(r)},delete(r){return i.delete(r)},async deleteAsync(r){let t=s(r);try{return await o.unlink(t),i.delete(r),!0}catch{return!1}},clear(){i.clear()},async clearAsync(){try{let r=await o.readdir(n);for(let t of r)t.endsWith(u)&&await o.unlink(`${n}/${t}`);i.clear()}catch{}}}}function ve(e){let{store:n,prefix:u="workflow:",ttl:o}=e,s=r=>`${u}${r}`,i=new Map;return{get(r){return i.get(r)},async getAsync(r){let t=await n.get(s(r));if(t)try{let k=JSON.parse(t),w=X(k);return i.set(r,w),w}catch{return}},set(r,t){i.set(r,t)},async setAsync(r,t){let k=J(t);await n.set(s(r),JSON.stringify(k),o?{ttl:o}:void 0),i.set(r,t)},has(r){return i.has(r)},async hasAsync(r){return n.exists(s(r))},delete(r){return i.delete(r)},async deleteAsync(r){return i.delete(r),n.delete(s(r))},clear(){i.clear()},async clearAsync(){let r=await n.keys(`${u}*`);for(let t of r)await n.delete(t);i.clear()}}}function Ae(e,n="workflow:state:"){let u=o=>`${n}${o}`;return{async save(o,s,i){let r=ee(s,i);await e.set(u(o),JSON.stringify(r))},async load(o){let s=await e.get(u(o));if(s)try{let i=JSON.parse(s);return ne(i)}catch{return}},async delete(o){return e.delete(u(o))},async list(){return(await e.keys(`${n}*`)).map(s=>s.slice(n.length))}}}function be(e,n,u){let o=!1;return{async hydrate(){if(o)return;let s=await n.load(u);if(s)for(let[i,r]of s.steps)e.set(i,r.result);o=!0},get(s){return e.get(s)},set(s,i){e.set(s,i)},has(s){return e.has(s)},delete(s){return e.delete(s)},clear(){e.clear()}}}function Me(e){return typeof 
e=="object"&&e!==null&&e.type==="MIGRATION_ERROR"}function Oe(e){return typeof e=="object"&&e!==null&&e.type==="VERSION_INCOMPATIBLE"}async function ye(e,n,u){let o=e.state,s=e.version;if(s>n)return h({type:"VERSION_INCOMPATIBLE",stateVersion:s,currentVersion:n,reason:"State version is higher than current workflow version. Cannot downgrade."});if(s===n)return I({version:s,state:o});for(;s<n;){let i=u[s];if(!i)return h({type:"VERSION_INCOMPATIBLE",stateVersion:e.version,currentVersion:n,reason:`No migration found for version ${s} to ${s+1}`});try{o=await i(o),s++}catch(r){return h({type:"MIGRATION_ERROR",fromVersion:s,toVersion:s+1,cause:r})}}return I({version:s,state:o})}function Ie(e){let{version:n,migrations:u={},strictVersioning:o=!0}=e;return async s=>{if(!s)return I(void 0);if(o&&s.version>n)return h({type:"VERSION_INCOMPATIBLE",stateVersion:s.version,currentVersion:n,reason:"Saved state is from a newer workflow version"});if(s.version===n)return I(s.state);let i=await ye(s,n,u);return i.ok?I(i.value.state):i}}function _e(e,n){return{version:n,state:e}}function Ve(e){if(!e)return null;try{let n=typeof e=="string"?JSON.parse(e):e;if(typeof n!="object"||n===null||!("version"in n)||typeof n.version!="number"||!("state"in n)||!n.state||!Array.isArray(n.state.steps))return null;let u=n,o=new Map(u.state.steps);return{version:u.version,state:{steps:o}}}catch{return null}}function Ue(e){return JSON.stringify({version:e.version,state:{steps:Array.from(e.state.steps.entries())}})}function Ke(e){return n=>{let u=new Map;for(let[o,s]of n.steps){let i=e[o]??o;u.set(i,s)}return{steps:u}}}function De(e){let n=new Set(e);return u=>{let o=new Map;for(let[s,i]of u.steps)n.has(s)||o.set(s,i);return{steps:o}}}function Fe(e){return n=>{let u=new Map;for(let[o,s]of n.steps){let i=e[o];u.set(o,i?i(s):s)}return{steps:u}}}function Ne(e){return async n=>{let u=n;for(let o of e)u=await o(u);return u}}export{Ne as composeMigrations,Pe as createFileCache,be as createHydratingCache,ve as 
createKVCache,De as createKeyRemoveMigration,Ke as createKeyRenameMigration,he as createMemoryCache,Ae as createStatePersistence,Fe as createValueTransformMigration,_e as createVersionedState,Ie as createVersionedStateLoader,W as deserializeCause,pe as deserializeEntry,le as deserializeMeta,X as deserializeResult,ne as deserializeState,Me as isMigrationError,Oe as isVersionIncompatibleError,ye as migrateState,xe as parseState,Ve as parseVersionedState,$ as serializeCause,ce as serializeEntry,ae as serializeMeta,J as serializeResult,ee as serializeState,Se as stringifyState,Ue as stringifyVersionedState};
2
+ //# sourceMappingURL=persistence.js.map