@uploadista/core 0.0.20-beta.6 → 0.0.20-beta.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/flow/index.cjs +1 -1
  2. package/dist/flow/index.d.cts +2 -2
  3. package/dist/flow/index.d.mts +2 -2
  4. package/dist/flow/index.mjs +1 -1
  5. package/dist/flow-BHVkk_6W.cjs +1 -0
  6. package/dist/{flow-_OmguvZm.mjs → flow-DlhHOlMk.mjs} +2 -2
  7. package/dist/flow-DlhHOlMk.mjs.map +1 -0
  8. package/dist/index-9gyMMEIB.d.cts.map +1 -1
  9. package/dist/{index-Dim9X1-G.d.cts → index-BtDyiX5-.d.cts} +1604 -1604
  10. package/dist/index-BtDyiX5-.d.cts.map +1 -0
  11. package/dist/{index-Bhlv8wF9.d.mts → index-XXHmCYAu.d.mts} +1604 -1604
  12. package/dist/index-XXHmCYAu.d.mts.map +1 -0
  13. package/dist/index.cjs +1 -1
  14. package/dist/index.d.cts +2 -2
  15. package/dist/index.d.mts +2 -2
  16. package/dist/index.mjs +1 -1
  17. package/dist/testing/index.cjs +2 -2
  18. package/dist/testing/index.d.cts +9 -9
  19. package/dist/testing/index.d.cts.map +1 -1
  20. package/dist/testing/index.d.mts +9 -9
  21. package/dist/testing/index.d.mts.map +1 -1
  22. package/dist/testing/index.mjs +2 -2
  23. package/dist/testing/index.mjs.map +1 -1
  24. package/dist/types/index.d.cts +1 -1
  25. package/dist/types/index.d.mts +1 -1
  26. package/dist/upload/index.cjs +1 -1
  27. package/dist/upload/index.d.cts +2 -2
  28. package/dist/upload/index.d.mts +2 -2
  29. package/dist/upload/index.mjs +1 -1
  30. package/dist/{upload-tLC7uR9U.mjs → upload-C-C7hn1-.mjs} +2 -2
  31. package/dist/{upload-tLC7uR9U.mjs.map → upload-C-C7hn1-.mjs.map} +1 -1
  32. package/dist/{upload-BHDuuJ80.cjs → upload-DWBlRXHh.cjs} +1 -1
  33. package/package.json +7 -7
  34. package/src/flow/{flow-server.ts → flow-engine.ts} +106 -106
  35. package/src/flow/index.ts +10 -10
  36. package/src/flow/nodes/input-node.ts +5 -5
  37. package/src/flow/nodes/transform-node.ts +11 -14
  38. package/src/flow/typed-flow.ts +22 -20
  39. package/src/testing/index.ts +1 -1
  40. package/src/testing/{mock-upload-server.ts → mock-upload-engine.ts} +10 -10
  41. package/src/upload/index.ts +1 -1
  42. package/src/upload/{upload-server.ts → upload-engine.ts} +44 -40
  43. package/dist/flow-Cv8vCBQ2.cjs +0 -1
  44. package/dist/flow-_OmguvZm.mjs.map +0 -1
  45. package/dist/index-Bhlv8wF9.d.mts.map +0 -1
  46. package/dist/index-Dim9X1-G.d.cts.map +0 -1
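
Taken together, the hunks below amount to a rename of the core service: UploadServer becomes UploadEngine (and with it UploadServerShape → UploadEngineShape, createUploadServer → createUploadEngine, the uploadServer layer → uploadEngine, and the testing mock TestUploadServer → TestUploadEngine; src/flow/flow-server.ts and src/upload/upload-server.ts move to flow-engine.ts and upload-engine.ts). As a rough orientation only, consumer code might migrate along the lines of the sketch below; the @uploadista/core import specifiers are assumptions, since the diff only shows package-internal relative paths such as "../upload".

```ts
import { Effect } from "effect";
// Assumed public entry points; the diff itself only shows internal imports like "../upload".
import { UploadEngine, uploadEngine } from "@uploadista/core";
import { TestUploadEngine } from "@uploadista/core/testing";

// Only the tag name changed; resolving and calling the service works as before.
const program = Effect.gen(function* () {
  const engine = yield* UploadEngine; // was: yield* UploadServer
  const capabilities = yield* engine.getCapabilities("s3-production", null);
  return capabilities;
});

// Production wiring: provide the pre-built layer (formerly `uploadServer`)
// alongside the same KV store / data store / event emitter layers as before.
const live = program.pipe(Effect.provide(uploadEngine));

// Tests: the in-memory mock layer was renamed from `TestUploadServer`.
const test = program.pipe(Effect.provide(TestUploadEngine));
```
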
package/src/flow/nodes/transform-node.ts

@@ -2,14 +2,11 @@ import { Effect, Stream } from "effect";
  import type { UploadistaError } from "../../errors";
  import type { StreamingConfig, UploadFile } from "../../types";
  import { DEFAULT_STREAMING_CONFIG, uploadFileSchema } from "../../types";
- import { UploadServer } from "../../upload";
+ import { UploadEngine } from "../../upload";
  import { createFlowNode, NodeType } from "../node";
  import { completeNodeExecution, type FileNamingConfig } from "../types";
  import type { FlowCircuitBreakerConfig } from "../types/flow-types";
- import {
- applyFileNaming,
- buildNamingContext,
- } from "../utils/file-naming";
+ import { applyFileNaming, buildNamingContext } from "../utils/file-naming";
  import { resolveUploadMetadata } from "../utils/resolve-upload-metadata";

  /**
@@ -231,7 +228,7 @@ export function createTransformNode({
  };

  return Effect.gen(function* () {
- const uploadServer = yield* UploadServer;
+ const uploadEngine = yield* UploadEngine;

  return yield* createFlowNode<UploadFile, UploadFile>({
  id,
@@ -277,8 +274,8 @@ export function createTransformNode({
  return false;
  }

- // Check DataStore capabilities via UploadServer
- const capabilities = yield* uploadServer.getCapabilities(
+ // Check DataStore capabilities via UploadEngine
+ const capabilities = yield* uploadEngine.getCapabilities(
  storageId,
  clientId,
  );
@@ -303,7 +300,7 @@ export function createTransformNode({
  yield* Effect.logDebug(`Using streaming transform for ${file.id}`);

  // Get input stream
- const inputStream = yield* uploadServer.readStream(
+ const inputStream = yield* uploadEngine.readStream(
  file.id,
  clientId,
  effectiveStreamingConfig,
@@ -341,7 +338,7 @@ export function createTransformNode({
  }

  // Check if DataStore supports streaming writes
- const capabilities = yield* uploadServer.getCapabilities(
+ const capabilities = yield* uploadEngine.getCapabilities(
  storageId,
  clientId,
  );
@@ -354,7 +351,7 @@ export function createTransformNode({
  `Using streaming write for ${file.id} - no intermediate buffering`,
  );

- result = yield* uploadServer.uploadStream(
+ result = yield* uploadEngine.uploadStream(
  {
  storageId,
  uploadLengthDeferred: true,
@@ -402,7 +399,7 @@ export function createTransformNode({
  },
  });

- result = yield* uploadServer.upload(
+ result = yield* uploadEngine.upload(
  {
  storageId,
  size: outputBytes.byteLength,
@@ -451,7 +448,7 @@ export function createTransformNode({
  }

  // Read input bytes from upload server
- const inputBytes = yield* uploadServer.read(file.id, clientId);
+ const inputBytes = yield* uploadEngine.read(file.id, clientId);

  // Transform the bytes using the provided function
  const transformResult = yield* transform(inputBytes, file);
@@ -498,7 +495,7 @@ export function createTransformNode({

  // Upload the transformed bytes back to the upload server
  // Use output metadata if provided, otherwise fall back to original
- const result = yield* uploadServer.upload(
+ const result = yield* uploadEngine.upload(
  {
  storageId,
  size: outputBytes.byteLength,
package/src/flow/typed-flow.ts

@@ -5,7 +5,7 @@ import { Effect } from "effect";
  import { z } from "zod";
  import type { UploadistaError as CoreUploadistaError } from "../errors";
  import { UploadistaError } from "../errors";
- import type { UploadServer } from "../upload";
+ import type { UploadEngine } from "../upload";
  import type { FlowEvent } from "./event";
  import type { Flow, FlowExecutionResult } from "./flow";
  import { createFlowWithSchema } from "./flow";
@@ -59,13 +59,14 @@ export type NodeDefinitionsRecord = Record<string, NodeDefinition<any, any>>;
  * If the node is an Effect, extracts its error type.
  * If the node is a plain FlowNode, returns never (no errors).
  */
- type NodeDefinitionError<T> = T extends Effect.Effect<
- FlowNode<any, any, CoreUploadistaError>,
- infer TError,
- any
- >
- ? TError
- : never;
+ type NodeDefinitionError<T> =
+ T extends Effect.Effect<
+ FlowNode<any, any, CoreUploadistaError>,
+ infer TError,
+ any
+ >
+ ? TError
+ : never;

  /**
  * Extracts the requirements (dependencies) from a NodeDefinition.
@@ -111,7 +112,7 @@ type NodesRequirementsUnion<TNodes extends NodeDefinitionsRecord> = {
  * Extracts all service requirements from a flow's nodes.
  *
  * This includes all services required by any node in the flow,
- * including UploadServer (which is provided by the runtime).
+ * including UploadEngine (which is provided by the runtime).
  *
  * @template TNodes - The record of node definitions
  *
@@ -125,17 +126,17 @@ type NodesRequirementsUnion<TNodes extends NodeDefinitionsRecord> = {
  * edges: [...]
  * });
  * type AllRequirements = FlowRequirements<typeof myFlow.nodes>;
- * // AllRequirements = ImagePlugin | UploadServer
+ * // AllRequirements = ImagePlugin | UploadEngine
  * ```
  */
  export type FlowRequirements<TNodes extends NodeDefinitionsRecord> =
  NodesRequirementsUnion<TNodes>;

  /**
- * Extracts plugin service requirements from a flow, excluding UploadServer.
+ * Extracts plugin service requirements from a flow, excluding UploadEngine.
  *
  * This type is useful for determining which plugin layers need to be
- * provided when creating a server, as UploadServer is automatically
+ * provided when creating a server, as UploadEngine is automatically
  * provided by the runtime.
  *
  * @template TNodes - The record of node definitions
@@ -145,16 +146,16 @@ export type FlowRequirements<TNodes extends NodeDefinitionsRecord> =
  * const myFlow = createFlow({
  * nodes: {
  * resize: imageResizeNode, // requires ImagePlugin
- * upload: s3OutputNode, // requires UploadServer
+ * upload: s3OutputNode, // requires UploadEngine
  * },
  * edges: [...]
  * });
  * type PluginRequirements = FlowPluginRequirements<typeof myFlow.nodes>;
- * // PluginRequirements = ImagePlugin (UploadServer excluded)
+ * // PluginRequirements = ImagePlugin (UploadEngine excluded)
  * ```
  */
  export type FlowPluginRequirements<TNodes extends NodeDefinitionsRecord> =
- Exclude<FlowRequirements<TNodes>, UploadServer>;
+ Exclude<FlowRequirements<TNodes>, UploadEngine>;

  /**
  * Infers the concrete FlowNode type from a NodeDefinition.
@@ -164,11 +165,12 @@ export type FlowPluginRequirements<TNodes extends NodeDefinitionsRecord> =
  *
  * Uses the shared ResolveEffect utility for consistency.
  */
- type InferNode<T> = T extends FlowNode<any, any, CoreUploadistaError>
- ? T
- : ResolveEffect<T> extends FlowNode<any, any, CoreUploadistaError>
- ? ResolveEffect<T>
- : never;
+ type InferNode<T> =
+ T extends FlowNode<any, any, CoreUploadistaError>
+ ? T
+ : ResolveEffect<T> extends FlowNode<any, any, CoreUploadistaError>
+ ? ResolveEffect<T>
+ : never;

  type ResolvedNodesRecord<TNodes extends NodeDefinitionsRecord> = {
  [K in keyof TNodes]: InferNode<TNodes[K]>;
package/src/testing/index.ts

@@ -11,7 +11,7 @@ export { TestDocumentAiPlugin } from "./mock-document-ai-plugin";
  export { TestDocumentPlugin } from "./mock-document-plugin";
  export { TestImageAiPlugin } from "./mock-image-ai-plugin";
  export { TestImagePlugin } from "./mock-image-plugin";
- export { TestUploadServer } from "./mock-upload-server";
+ export { TestUploadEngine } from "./mock-upload-engine";
  export { TestVideoPlugin } from "./mock-video-plugin";
  export { TestVirusScanPlugin } from "./mock-virus-scan-plugin";
  export { TestZipPlugin } from "./mock-zip-plugin";
package/src/testing/{mock-upload-server.ts → mock-upload-engine.ts}

@@ -2,32 +2,32 @@ import { Effect, Layer, Stream } from "effect";
  import type { UploadistaError } from "../errors";
  import type { InputFile, UploadFile, WebSocketConnection } from "../types";
  import {
- DEFAULT_STREAMING_CONFIG,
  type DataStoreCapabilities,
+ DEFAULT_STREAMING_CONFIG,
  type StreamingConfig,
  } from "../types/data-store";
- import { UploadServer } from "../upload";
+ import { UploadEngine } from "../upload";

  /**
- * Mock UploadServer implementation for testing.
+ * Mock UploadEngine implementation for testing.
  *
- * Provides a complete in-memory implementation of all UploadServer methods
+ * Provides a complete in-memory implementation of all UploadEngine methods
  * suitable for unit and integration tests.
  *
  * @example
  * ```typescript
- * import { TestUploadServer } from "@uploadista/core/testing";
+ * import { TestUploadEngine } from "@uploadista/core/testing";
  *
  * const program = Effect.gen(function* () {
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  * const upload = yield* server.createUpload(inputFile, "client-123");
  * return upload;
- * }).pipe(Effect.provide(TestUploadServer));
+ * }).pipe(Effect.provide(TestUploadEngine));
  * ```
  */
- export const TestUploadServer = Layer.succeed(
- UploadServer,
- UploadServer.of({
+ export const TestUploadEngine = Layer.succeed(
+ UploadEngine,
+ UploadEngine.of({
  read: (fileId: string, _clientId: string | null) =>
  Effect.sync(() => {
  // Generate mock file data based on fileId
package/src/upload/index.ts

@@ -1,3 +1,3 @@
  export * from "./mime";
- export * from "./upload-server";
+ export * from "./upload-engine";
  export * from "./upload-strategy-negotiator";
package/src/upload/{upload-server.ts → upload-engine.ts}

@@ -24,7 +24,7 @@ import { uploadChunk } from "./upload-chunk";
  import { arrayBuffer, fetchFile } from "./upload-url";

  /**
- * Legacy configuration options for UploadServer.
+ * Legacy configuration options for UploadEngine.
  *
  * @deprecated Use Effect Layers instead of this configuration object.
  * This type is kept for backward compatibility.
@@ -36,7 +36,7 @@ import { arrayBuffer, fetchFile } from "./upload-url";
  * @property middlewares - Optional request middlewares
  * @property withTracing - Enable Effect tracing for debugging
  */
- export type UploadServerOptions = {
+ export type UploadEngineOptions = {
  dataStore:
  | ((storageId: string) => Promise<DataStore<UploadFile>>)
  | DataStore<UploadFile>;
@@ -48,7 +48,7 @@ export type UploadServerOptions = {
  };

  /**
- * UploadServer service interface.
+ * UploadEngine service interface.
  *
  * This is the core upload handling service that provides all file upload operations.
  * It manages upload lifecycle, resumable uploads, progress tracking, and storage integration.
@@ -70,7 +70,7 @@ export type UploadServerOptions = {
  * ```typescript
  * // Basic upload flow
  * const program = Effect.gen(function* () {
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  *
  * // 1. Create upload
  * const inputFile: InputFile = {
@@ -93,7 +93,7 @@ export type UploadServerOptions = {
  *
  * // Upload with WebSocket progress tracking
  * const uploadWithProgress = Effect.gen(function* () {
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  *
  * // Subscribe to progress events
  * yield* server.subscribeToUploadEvents(uploadId, websocket);
@@ -109,7 +109,7 @@ export type UploadServerOptions = {
  *
  * // Upload from URL
  * const urlUpload = Effect.gen(function* () {
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  *
  * const inputFile: InputFile = {
  * storageId: "s3-production",
@@ -128,7 +128,7 @@ export type UploadServerOptions = {
  * });
  * ```
  */
- export type UploadServerShape = {
+ export type UploadEngineShape = {
  createUpload: (
  inputFile: InputFile,
  clientId: string | null,
@@ -172,7 +172,7 @@ export type UploadServerShape = {
  *
  * @example
  * ```typescript
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  * const stream = yield* server.readStream(uploadId, clientId, { chunkSize: 65536 });
  * // Process stream chunk by chunk with bounded memory
  * yield* Stream.runForEach(stream, (chunk) => processChunk(chunk));
@@ -182,7 +182,10 @@ export type UploadServerShape = {
  uploadId: string,
  clientId: string | null,
  config?: StreamingConfig,
- ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
+ ) => Effect.Effect<
+ Stream.Stream<Uint8Array, UploadistaError>,
+ UploadistaError
+ >;
  /**
  * Uploads file content from a stream with unknown final size.
  * Creates upload with deferred length, streams content to storage,
@@ -198,7 +201,7 @@ export type UploadServerShape = {
  *
  * @example
  * ```typescript
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  * const result = yield* server.uploadStream(
  * {
  * storageId: "s3-production",
@@ -231,40 +234,40 @@ export type UploadServerShape = {
  };

  /**
- * Effect-TS context tag for the UploadServer service.
+ * Effect-TS context tag for the UploadEngine service.
  *
- * Use this tag to access the UploadServer in an Effect context.
+ * Use this tag to access the UploadEngine in an Effect context.
  * The server must be provided via a Layer or dependency injection.
  *
  * @example
  * ```typescript
- * // Access UploadServer in an Effect
+ * // Access UploadEngine in an Effect
  * const uploadEffect = Effect.gen(function* () {
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  * const upload = yield* server.createUpload(inputFile, clientId);
  * return upload;
  * });
  *
- * // Provide UploadServer layer
+ * // Provide UploadEngine layer
  * const program = uploadEffect.pipe(
- * Effect.provide(uploadServer),
+ * Effect.provide(uploadEngine),
  * Effect.provide(uploadFileKvStore),
  * Effect.provide(dataStoreLayer),
  * Effect.provide(eventEmitterLayer)
  * );
  * ```
  */
- export class UploadServer extends Context.Tag("UploadServer")<
- UploadServer,
- UploadServerShape
+ export class UploadEngine extends Context.Tag("UploadEngine")<
+ UploadEngine,
+ UploadEngineShape
  >() {}

  /**
- * Creates the UploadServer implementation.
+ * Creates the UploadEngine implementation.
  *
- * This function constructs the UploadServer service by composing all required
+ * This function constructs the UploadEngine service by composing all required
  * dependencies (KV store, data stores, event emitter, ID generator). It implements
- * all upload operations defined in UploadServerShape.
+ * all upload operations defined in UploadEngineShape.
  *
  * The server automatically handles:
  * - Upload lifecycle management (create, resume, complete)
@@ -272,24 +275,24 @@ export class UploadServer extends Context.Tag("UploadServer")<
  * - Storage backend routing based on storageId
  * - Error handling with proper UploadistaError types
  *
- * @returns An Effect that yields the UploadServerShape implementation
+ * @returns An Effect that yields the UploadEngineShape implementation
  *
  * @example
  * ```typescript
- * // Create a custom UploadServer layer
- * const myUploadServer = Layer.effect(
- * UploadServer,
- * createUploadServer()
+ * // Create a custom UploadEngine layer
+ * const myUploadEngine = Layer.effect(
+ * UploadEngine,
+ * createUploadEngine()
  * );
  *
  * // Use in a program
  * const program = Effect.gen(function* () {
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  * // Use server operations...
- * }).pipe(Effect.provide(myUploadServer));
+ * }).pipe(Effect.provide(myUploadEngine));
  * ```
  */
- export function createUploadServer() {
+ export function createUploadEngine() {
  return Effect.gen(function* () {
  const kvStore = yield* UploadFileKVStore;
  const eventEmitter = yield* UploadEventEmitter;
@@ -401,9 +404,7 @@ export function createUploadServer() {
  const capabilities = dataStore.getCapabilities();
  if (capabilities.supportsStreamingRead && dataStore.readStream) {
  // Use native streaming
- yield* Effect.logDebug(
- `Using streaming read for file ${uploadId}`,
- );
+ yield* Effect.logDebug(`Using streaming read for file ${uploadId}`);
  return yield* dataStore.readStream(uploadId, config);
  }

@@ -518,7 +519,10 @@ export function createUploadServer() {
  );

  // Calculate total size
- const totalSize = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
+ const totalSize = chunks.reduce(
+ (acc, chunk) => acc + chunk.length,
+ 0,
+ );

  // Create a combined buffer
  const buffer = new Uint8Array(totalSize);
@@ -586,14 +590,14 @@ export function createUploadServer() {
  Effect.gen(function* () {
  yield* eventEmitter.unsubscribe(uploadId);
  }),
- } satisfies UploadServerShape;
+ } satisfies UploadEngineShape;
  });
  }

  /**
- * Pre-built UploadServer Effect Layer.
+ * Pre-built UploadEngine Effect Layer.
  *
- * This layer provides a ready-to-use UploadServer implementation that can be
+ * This layer provides a ready-to-use UploadEngine implementation that can be
  * composed with other layers to build a complete upload system.
  *
  * Required dependencies:
@@ -606,7 +610,7 @@ export function createUploadServer() {
  * ```typescript
  * // Compose a complete upload system
  * const fullUploadSystem = Layer.mergeAll(
- * uploadServer,
+ * uploadEngine,
  * uploadFileKvStore,
  * dataStoreLayer,
  * uploadEventEmitter,
@@ -615,9 +619,9 @@ export function createUploadServer() {
  *
  * // Use in application
  * const app = Effect.gen(function* () {
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  * // Perform uploads...
  * }).pipe(Effect.provide(fullUploadSystem));
  * ```
  */
- export const uploadServer = Layer.effect(UploadServer, createUploadServer());
+ export const uploadEngine = Layer.effect(UploadEngine, createUploadEngine());
package/dist/flow-Cv8vCBQ2.cjs

@@ -1 +0,0 @@
- const e=require(`./types-Cws60JHC.cjs`),t=require(`./uploadista-error-BgQU45we.cjs`),n=require(`./upload-BHDuuJ80.cjs`);let r=require(`effect`),i=require(`zod`),a=require(`micromustache`);const o={enabled:!1,failureThreshold:5,resetTimeout:3e4,halfOpenRequests:3,windowDuration:6e4,fallback:{type:`fail`}},s=`uploadista:circuit-breaker:`;function c(t){let n=e=>`${s}${e}`,i=i=>r.Effect.gen(function*(){let r=n(i),a=yield*t.get(r);if(a===null)return null;try{return e.I.deserialize(a)}catch{return yield*t.delete(r),null}}),a=(r,i)=>{let a=n(r),o=e.I.serialize(i);return t.set(a,o)};return{getState:i,setState:a,incrementFailures:(t,n)=>r.Effect.gen(function*(){let r=Date.now(),o=yield*i(t);return o===null&&(o=e.O({failureThreshold:5,resetTimeout:3e4,halfOpenRequests:3,windowDuration:n})),o=r-o.windowStart>n?{...o,failureCount:1,windowStart:r}:{...o,failureCount:o.failureCount+1},yield*a(t,o),o.failureCount}),resetFailures:e=>r.Effect.gen(function*(){let t=yield*i(e);t!==null&&(yield*a(e,{...t,failureCount:0,windowStart:Date.now()}))}),incrementHalfOpenSuccesses:e=>r.Effect.gen(function*(){let t=yield*i(e);if(t===null)return 1;let n={...t,halfOpenSuccesses:t.halfOpenSuccesses+1};return yield*a(e,n),n.halfOpenSuccesses}),getAllStats:()=>r.Effect.gen(function*(){let e=new Map;if(!t.list)return e;let n=yield*t.list(s),r=Date.now();for(let t of n){let n=t,a=yield*i(n);if(a!==null){let t=r-a.lastStateChange;e.set(n,{nodeType:n,state:a.state,failureCount:a.failureCount,halfOpenSuccesses:a.halfOpenSuccesses,timeSinceLastStateChange:t,timeUntilHalfOpen:a.state===`open`?Math.max(0,a.config.resetTimeout-t):void 0})}}return e}),delete:e=>t.delete(n(e))}}function l(){let t=new Map;return{getState:e=>r.Effect.succeed(t.get(e)??null),setState:(e,n)=>r.Effect.sync(()=>{t.set(e,n)}),incrementFailures:(n,i)=>r.Effect.sync(()=>{let r=Date.now(),a=t.get(n);return a===void 0&&(a=e.O({failureThreshold:5,resetTimeout:3e4,halfOpenRequests:3,windowDuration:i})),a=r-a.windowStart>i?{...a,failureCount:1,windowStart:r}:{...a,failureCount:a.failureCount+1},t.set(n,a),a.failureCount}),resetFailures:e=>r.Effect.sync(()=>{let n=t.get(e);n!==void 0&&t.set(e,{...n,failureCount:0,windowStart:Date.now()})}),incrementHalfOpenSuccesses:e=>r.Effect.sync(()=>{let n=t.get(e);if(n===void 0)return 1;let r={...n,halfOpenSuccesses:n.halfOpenSuccesses+1};return t.set(e,r),r.halfOpenSuccesses}),getAllStats:()=>r.Effect.sync(()=>{let e=new Map,n=Date.now();for(let[r,i]of t){let t=n-i.lastStateChange;e.set(r,{nodeType:r,state:i.state,failureCount:i.failureCount,halfOpenSuccesses:i.halfOpenSuccesses,timeSinceLastStateChange:t,timeUntilHalfOpen:i.state===`open`?Math.max(0,i.config.resetTimeout-t):void 0})}return e}),delete:e=>r.Effect.sync(()=>{t.delete(e)})}}const u=r.Layer.effect(e.D,r.Effect.gen(function*(){return c(yield*e.k)})),d=r.Layer.succeed(e.D,l());var f=class{eventHandler;nodeType;config;store;constructor(e,t,n){this.nodeType=e,this.config={enabled:t.enabled??o.enabled,failureThreshold:t.failureThreshold??o.failureThreshold,resetTimeout:t.resetTimeout??o.resetTimeout,halfOpenRequests:t.halfOpenRequests??o.halfOpenRequests,windowDuration:t.windowDuration??o.windowDuration,fallback:t.fallback??o.fallback},this.store=n}setEventHandler(e){this.eventHandler=e}allowRequest(){let t=this;return r.Effect.gen(function*(){if(!t.config.enabled)return{allowed:!0,state:`closed`,failureCount:0};let 
n=yield*t.store.getState(t.nodeType),r=Date.now();if(n===null&&(n=e.O({failureThreshold:t.config.failureThreshold,resetTimeout:t.config.resetTimeout,halfOpenRequests:t.config.halfOpenRequests,windowDuration:t.config.windowDuration}),yield*t.store.setState(t.nodeType,n)),n.state===`open`&&r-n.lastStateChange>=t.config.resetTimeout){let e=n.state;n={...n,state:`half-open`,halfOpenSuccesses:0,lastStateChange:r},yield*t.store.setState(t.nodeType,n),yield*t.emitEvent(e,`half-open`,n.failureCount)}return{allowed:n.state!==`open`,state:n.state,failureCount:n.failureCount}})}getState(){let e=this;return r.Effect.gen(function*(){return(yield*e.store.getState(e.nodeType))?.state??`closed`})}getFailureCount(){let e=this;return r.Effect.gen(function*(){return(yield*e.store.getState(e.nodeType))?.failureCount??0})}recordSuccess(){let e=this;return r.Effect.gen(function*(){if(!e.config.enabled)return;let t=yield*e.store.getState(e.nodeType);t!==null&&(t.state===`half-open`?(yield*e.store.incrementHalfOpenSuccesses(e.nodeType))>=e.config.halfOpenRequests&&(yield*e.transitionTo(`closed`,t.failureCount)):t.state===`closed`&&(yield*e.store.resetFailures(e.nodeType)))})}recordFailure(e){let t=this;return r.Effect.gen(function*(){if(!t.config.enabled)return;let e=yield*t.store.getState(t.nodeType);if(e===null||e.state===`closed`){let e=yield*t.store.incrementFailures(t.nodeType,t.config.windowDuration);e>=t.config.failureThreshold&&(yield*t.transitionTo(`open`,e))}else e.state===`half-open`&&(yield*t.transitionTo(`open`,e.failureCount))})}getFallback(){return this.config.fallback}reset(){let t=this;return r.Effect.gen(function*(){let n=(yield*t.store.getState(t.nodeType))?.state??`closed`;yield*t.store.setState(t.nodeType,e.O({failureThreshold:t.config.failureThreshold,resetTimeout:t.config.resetTimeout,halfOpenRequests:t.config.halfOpenRequests,windowDuration:t.config.windowDuration})),n!==`closed`&&(yield*t.emitEvent(n,`closed`,0))})}transitionTo(e,t){let n=this;return r.Effect.gen(function*(){let r=yield*n.store.getState(n.nodeType),i=r?.state??`closed`;if(i===e)return;let a=Date.now(),o={state:e,failureCount:e===`closed`?0:t,lastStateChange:a,halfOpenSuccesses:0,windowStart:e===`closed`?a:r?.windowStart??a,config:{failureThreshold:n.config.failureThreshold,resetTimeout:n.config.resetTimeout,halfOpenRequests:n.config.halfOpenRequests,windowDuration:n.config.windowDuration}};yield*n.store.setState(n.nodeType,o),yield*n.emitEvent(i,e,t)})}emitEvent(e,t,n){let i=this;return r.Effect.gen(function*(){i.eventHandler&&(yield*i.eventHandler({nodeType:i.nodeType,previousState:e,newState:t,timestamp:Date.now(),failureCount:n}))})}},p=class{breakers=new Map;eventHandler;constructor(e){this.store=e}setEventHandler(e){this.eventHandler=e;for(let t of this.breakers.values())t.setEventHandler(e)}getOrCreate(e,t){let n=this.breakers.get(e);return n||(n=new f(e,t,this.store),this.eventHandler&&n.setEventHandler(this.eventHandler),this.breakers.set(e,n)),n}get(e){return this.breakers.get(e)}getAllStats(){return this.store.getAllStats()}resetAll(){let e=this;return r.Effect.gen(function*(){for(let t of e.breakers.values())yield*t.reset()})}clear(){this.breakers.clear()}};function m({source:e,target:t,sourcePort:n,targetPort:r}){return{source:e,target:t,sourcePort:n,targetPort:r}}let h=function(e){return 
e.JobStart=`job-start`,e.JobEnd=`job-end`,e.FlowStart=`flow-start`,e.FlowEnd=`flow-end`,e.FlowError=`flow-error`,e.FlowPause=`flow-pause`,e.FlowCancel=`flow-cancel`,e.NodeStart=`node-start`,e.NodeEnd=`node-end`,e.NodePause=`node-pause`,e.NodeResume=`node-resume`,e.NodeError=`node-error`,e.NodeStream=`node-stream`,e.NodeResponse=`node-response`,e.DlqItemAdded=`dlq-item-added`,e.DlqRetryStart=`dlq-retry-start`,e.DlqRetrySuccess=`dlq-retry-success`,e.DlqRetryFailed=`dlq-retry-failed`,e.DlqItemExhausted=`dlq-item-exhausted`,e.DlqItemResolved=`dlq-item-resolved`,e}({});var g=class{types;constructor(){this.types=new Map}register(e){if(this.types.has(e.id))throw t.n.fromCode(`VALIDATION_ERROR`,{body:`Input type "${e.id}" is already registered. Types cannot be modified or re-registered.`,details:{typeId:e.id}});this.types.set(e.id,e)}get(e){return this.types.get(e)}list(){return Array.from(this.types.values())}validate(e,n){let r=this.types.get(e);if(!r)return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Input type "${e}" is not registered`,details:{typeId:e}})};try{return{success:!0,data:r.schema.parse(n)}}catch(n){return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Data validation failed for input type "${e}"`,cause:n,details:{typeId:e,validationErrors:n}})}}}has(e){return this.types.has(e)}size(){return this.types.size}};const _=new g;function v(e,t){return _.validate(e,t)}var y=class{types;constructor(){this.types=new Map}register(e){if(this.types.has(e.id))throw t.n.fromCode(`VALIDATION_ERROR`,{body:`Output type "${e.id}" is already registered. Types cannot be modified or re-registered.`,details:{typeId:e.id}});this.types.set(e.id,e)}get(e){return this.types.get(e)}list(){return Array.from(this.types.values())}validate(e,n){let r=this.types.get(e);if(!r)return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Output type "${e}" is not registered`,details:{typeId:e}})};try{return{success:!0,data:r.schema.parse(n)}}catch(n){return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Data validation failed for output type "${e}"`,cause:n,details:{typeId:e,validationErrors:n}})}}}has(e){return this.types.has(e)}size(){return this.types.size}};const b=new y;function x(e,t){return b.validate(e,t)}let S=function(e){return e.input=`input`,e.process=`process`,e.conditional=`conditional`,e.multiplex=`multiplex`,e.merge=`merge`,e}({});function C({id:e,name:n,description:i,type:a,inputSchema:o,outputSchema:s,run:c,condition:l,multiInput:u=!1,multiOutput:d=!1,pausable:f=!1,retry:p,inputTypeId:m,outputTypeId:h,keepOutput:g=!1,circuitBreaker:v,nodeTypeId:y}){return r.Effect.gen(function*(){return m&&!_.get(m)?yield*t.n.fromCode(`INVALID_INPUT_TYPE`,{body:`Input type "${m}" is not registered in inputTypeRegistry`,details:{inputTypeId:m,nodeId:e}}).toEffect():h&&!b.get(h)?yield*t.n.fromCode(`INVALID_OUTPUT_TYPE`,{body:`Output type "${h}" is not registered in outputTypeRegistry`,details:{outputTypeId:h,nodeId:e}}).toEffect():{id:e,name:n,description:i,type:a,inputTypeId:m,outputTypeId:h,keepOutput:g,inputSchema:o,outputSchema:s,pausable:f,run:({data:i,jobId:a,flowId:l,storageId:u,clientId:d})=>r.Effect.gen(function*(){let f=yield*c({data:yield*r.Effect.try({try:()=>o.parse(i),catch:r=>{let i=r instanceof Error?r.message:String(r);return t.n.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${e}) input validation failed: ${i}`,cause:r})}}),jobId:a,storageId:u,flowId:l,clientId:d});return 
f.type===`waiting`?{type:`waiting`,partialData:f.partialData,nodeType:h,nodeId:e}:{type:`complete`,data:yield*r.Effect.try({try:()=>s.parse(f.data),catch:r=>{let i=r instanceof Error?r.message:String(r);return t.n.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${e}) output validation failed: ${i}`,cause:r})}}),nodeType:h,nodeId:e}}),condition:l,multiInput:u,multiOutput:d,retry:p,circuitBreaker:v,nodeTypeId:y}})}const w=e=>({id:e.id,name:e.name,description:e.description,type:e.type,inputTypeId:e.inputTypeId,outputTypeId:e.outputTypeId,nodeTypeId:e.nodeTypeId}),T=e=>({type:`complete`,data:e}),E=e=>({type:`waiting`,partialData:e}),D=(e,t)=>{if(e===t)return!0;try{return!!(e&&t&&typeof e==`object`&&typeof t==`object`)}catch{return!0}};var O=class{typeChecker;constructor(e=D){this.typeChecker=e}validateConnection(e,t,n){return this.getCompatibleTypes(e.outputSchema,t.inputSchema)}getCompatibleTypes(e,t){return this.typeChecker(e,t)}validateFlow(e,t){let n=[],r=new Map(e.map(e=>[e.id,e]));for(let e of t){let t=r.get(e.source),i=r.get(e.target);if(!t){n.push(`Source node ${e.source} not found`);continue}if(!i){n.push(`Target node ${e.target} not found`);continue}this.validateConnection(t,i,e)||n.push(`Schema mismatch: ${t.id} output schema incompatible with ${i.id} input schema`)}return{isValid:n.length===0,errors:n}}getExpectedInputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.target===e){let e=r.get(t.source);if(e){let n=t.sourcePort||t.source;i[n]=e.outputSchema}}return i}getActualOutputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.source===e){let e=r.get(t.target);if(e){let n=t.targetPort||t.target;i[n]=e.inputSchema}}return i}validateData(e,t){try{return t.parse(e),{isValid:!0,errors:[]}}catch(e){return e instanceof Error&&`errors`in e?{isValid:!1,errors:e.errors.map(e=>`${e.path.join(`.`)}: ${e.message}`)}:{isValid:!1,errors:[e instanceof Error?e.message:`Validation failed`]}}}};function k(e){if(!e)return{type:``,fileName:``,metadata:void 0,metadataJson:void 0};let t={...e},n=String(t.type||t.mimeType||t[`content-type`]||``);n&&(t.type||=n,t.mimeType||=n);let r=String(t.fileName||t.originalName||t.name||``);return r&&(t.fileName||=r,t.originalName||=r,t.name||=r),{type:n,fileName:r,metadata:t,metadataJson:JSON.stringify(t)}}const A=i.z.object({operation:i.z.literal(`init`),storageId:i.z.string(),metadata:i.z.record(i.z.string(),i.z.any()).optional()}),j=i.z.object({operation:i.z.literal(`finalize`),uploadId:i.z.string()}),M=i.z.object({operation:i.z.literal(`url`),url:i.z.string(),storageId:i.z.string().optional(),metadata:i.z.record(i.z.string(),i.z.any()).optional()}),N=i.z.union([A,j,M]),P=i.z.object({allowedMimeTypes:i.z.array(i.z.string()).optional(),minSize:i.z.number().positive().optional(),maxSize:i.z.number().positive().optional()});function ee(e,n){return r.Effect.gen(function*(){if(n){if(n.allowedMimeTypes&&n.allowedMimeTypes.length>0&&!n.allowedMimeTypes.some(t=>{if(t.endsWith(`/*`)){let n=t.slice(0,-2);return e.type.startsWith(n)}return e.type===t}))throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`File type "${e.type}" is not allowed. 
Allowed types: ${n.allowedMimeTypes.join(`, `)}`)}).toEffect();if(n.minSize!==void 0&&e.size<n.minSize)throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${e.size} bytes) is below minimum (${n.minSize} bytes)`)}).toEffect();if(n.maxSize!==void 0&&e.size>n.maxSize)throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${e.size} bytes) exceeds maximum (${n.maxSize} bytes)`)}).toEffect()}})}function te(i,a,o){let s=o?.keepOutput??!1;return r.Effect.gen(function*(){let o=yield*n.n;return yield*C({id:i,name:`Input`,description:`Handles file input through multiple methods - streaming upload (init/finalize) or direct URL fetch`,type:S.input,nodeTypeId:`input`,inputSchema:N,outputSchema:e.E,keepOutput:s,inputTypeId:R,outputTypeId:F,run:({data:e,flowId:s,jobId:c,clientId:l})=>r.Effect.gen(function*(){switch(e.operation){case`init`:{let t={storageId:e.storageId,size:e.metadata?.size||0,type:e.metadata?.mimeType||`application/octet-stream`,fileName:e.metadata?.originalName,lastModified:e.metadata?.size?Date.now():void 0,metadata:e.metadata?JSON.stringify(e.metadata):void 0,flow:{flowId:s,nodeId:i,jobId:c}};return E(yield*o.createUpload(t,l))}case`finalize`:{let t=yield*o.getUpload(e.uploadId),{type:n}=k(t.metadata);return yield*ee({type:n,size:t.size||0},a),T(t)}case`url`:{let t=yield*n.o(e.url),r=yield*n.a(t),u=e.metadata?.mimeType||t.headers.get(`content-type`)||`application/octet-stream`,d=e.metadata?.size||Number(t.headers.get(`content-length`)||0),f=e.metadata?.originalName||e.url.split(`/`).pop()||`file`;yield*ee({type:u,size:d},a);let p=new ReadableStream({start(e){e.enqueue(new Uint8Array(r)),e.close()}}),m={storageId:e.storageId||`buffer`,size:d,type:u,fileName:f,lastModified:Date.now(),metadata:e.metadata?JSON.stringify(e.metadata):void 0};return T({...yield*o.upload(m,l,p),flow:{flowId:s,nodeId:i,jobId:c}})}default:throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`Invalid operation`)}).toEffect()}})})})}const F=`storage-output-v1`,I=`ocr-output-v1`,L=`image-description-output-v1`,R=`streaming-input-v1`,z=i.z.object({extractedText:i.z.string(),format:i.z.enum([`markdown`,`plain`,`structured`]),taskType:i.z.enum([`convertToMarkdown`,`freeOcr`,`parseFigure`,`locateObject`]),confidence:i.z.number().min(0).max(1).optional()}),B=i.z.object({description:i.z.string(),confidence:i.z.number().min(0).max(1).optional(),metadata:i.z.record(i.z.string(),i.z.unknown()).optional()});_.register({id:R,schema:N,version:`1.0.0`,description:`Streaming file input with init/finalize/url operations for flexible file ingestion`}),b.register({id:F,schema:e.E,version:`1.0.0`,description:`Storage output node that saves files to configured storage backend`}),b.register({id:I,schema:z,version:`1.0.0`,description:`OCR output node that extracts structured text from documents using AI`}),b.register({id:L,schema:B,version:`1.0.0`,description:`Image description output node that generates AI-powered descriptions of images`});var ne=class{maxConcurrency;constructor(e={}){this.maxConcurrency=e.maxConcurrency??4}groupNodesByExecutionLevel(e,t){let n={},r={};e.forEach(e=>{n[e.id]=[],r[e.id]=0}),t.forEach(e=>{n[e.source]?.push(e.target),r[e.target]=(r[e.target]||0)+1});let i=[],a=new Set,o=0;for(;a.size<e.length;){let e=Object.keys(r).filter(e=>r[e]===0&&!a.has(e));if(e.length===0)throw Error(`Cycle detected in flow graph - cannot execute in parallel`);i.push({level:o++,nodes:e}),e.forEach(e=>{a.add(e),delete r[e],n[e]?.forEach(e=>{r[e]!==void 0&&r[e]--})})}return 
i}executeNodesInParallel(e){return r.Effect.all(e.map(e=>e()),{concurrency:this.maxConcurrency})}canExecuteInParallel(e,t,n){return e.every(e=>(n[e]||[]).every(e=>t.has(e)))}getStats(){return{maxConcurrency:this.maxConcurrency}}};function V(e){return t=>{if(t.nodeType!==e)return!1;let n=b.get(e);return n?n.schema.safeParse(t.data).success:!1}}function re(t){return!t||typeof t!=`object`?!1:e.E.safeParse(t).success}const ie=V(`storage-output-v1`),ae=V(I),oe=V(L);function H(e,t){return e.filter(t)}function se(e,n){return r.Effect.gen(function*(){let r=H(e,n);return r.length===0?yield*t.n.fromCode(`OUTPUT_NOT_FOUND`,{body:`No output of the specified type was found in the flow results`}).toEffect():r.length>1?yield*t.n.fromCode(`MULTIPLE_OUTPUTS_FOUND`,{body:`Found ${r.length} outputs of the specified type, expected exactly one`,details:{foundCount:r.length,nodeIds:r.map(e=>e.nodeId)}}).toEffect():r[0]})}function ce(e,t){return H(e,t)[0]}function le(e,t){return e.find(e=>e.nodeId===t)}function ue(e,t){return e.some(t)}function de(e){return e.operation===`init`}function fe(e){return e.operation===`finalize`}function pe(e){return e.operation===`url`}function me(e){return e.operation===`init`||e.operation===`url`}const U=e=>({id:e.id,name:e.name,nodes:e.nodes.map(w),edges:e.edges});function W(n){return r.Effect.gen(function*(){let a=yield*r.Effect.all(n.nodes.map(e=>r.Effect.isEffect(e)?e:r.Effect.succeed(e))),{flowId:o,name:s,onEvent:c,checkJobStatus:l,edges:u,inputSchema:d,outputSchema:f,typeChecker:m,circuitBreaker:g}=n,_=a,v=new O(m),y=e=>{let t=e.circuitBreaker,n=e.nodeTypeId?g?.nodeTypeOverrides?.[e.nodeTypeId]:void 0,r=g?.defaults;if(!(!t&&!n&&!r))return{...r,...n,...t}},b=()=>{let e={},t={},n={};return _.forEach(r=>{e[r.id]=[],n[r.id]=[],t[r.id]=0}),u.forEach(r=>{e[r.source]?.push(r.target),n[r.target]?.push(r.source),t[r.target]=(t[r.target]||0)+1}),{graph:e,reverseGraph:n,inDegree:t}},x=()=>{let{graph:e,inDegree:t}=b(),n=[],r=[];for(Object.keys(t).forEach(e=>{t[e]===0&&n.push(e)});n.length>0;){let i=n.shift();if(!i)throw Error(`No current node found`);r.push(i),e[i]?.forEach(e=>{t[e]=(t[e]||0)-1,t[e]===0&&n.push(e)})}return r},S=(e,t)=>{if(!e.condition)return r.Effect.succeed(!0);let{field:n,operator:i,value:a}=e.condition,o=t,s=o?.metadata?.[n]||o?.[n],c=(()=>{switch(i){case`equals`:return s===a;case`notEquals`:return s!==a;case`greaterThan`:return Number(s)>Number(a);case`lessThan`:return Number(s)<Number(a);case`contains`:return String(s).includes(String(a));case`startsWith`:return String(s).startsWith(String(a));default:return!0}})();return r.Effect.succeed(c)},C=(e,t)=>{let{reverseGraph:n}=b(),r=n[e]||[],i={};return r.forEach(e=>{let n=t.get(e);n!==void 0&&(i[e]=n)}),i},w=e=>{let t=_.filter(e=>e.type===`input`),n={};return t.forEach(t=>{e&&typeof e==`object`&&t.id in e&&(n[t.id]=d.parse(e[t.id]))}),n},T=e=>!u.some(t=>t.source===e),E=e=>{let t=_.find(t=>t.id===e);return T(e)||t?.keepOutput===!0},D=e=>{let t=_.filter(e=>E(e.id)),n={};return t.forEach(t=>{let r=e.get(t.id);r!==void 0&&(n[t.id]=r)}),n},k=(e,t)=>{let n=_.filter(e=>E(e.id)),r=[];return n.forEach(n=>{let i=e.get(n.id);if(i!==void 0){let e=t.get(n.id);r.push({nodeId:n.id,nodeType:e,data:i,timestamp:new Date().toISOString()})}}),r},A=(t,n,i)=>r.Effect.gen(function*(){if(t.storage.id===n)return t;let a=yield*e.S,o=yield*a.getDataStore(t.storage.id,i),s=yield*a.getDataStore(n,i),c=yield*o.read(t.id),l=r.Stream.make(c),u={...t,storage:{id:n,type:t.storage.type}},d=yield*s.create(u);return 
yield*s.write({file_id:d.id,stream:l,offset:0},{}),d}),j=(e,i,a,s,u,d,f,p)=>r.Effect.gen(function*(){let m=u.get(e);if(!m)return yield*t.n.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();if(l){let e=yield*l(d);if(e===`paused`)return yield*t.n.fromCode(`FLOW_PAUSED`,{cause:`Flow ${o} was paused by user at job ${d}`}).toEffect();if(e===`cancelled`)return yield*t.n.fromCode(`FLOW_CANCELLED`,{cause:`Flow ${o} was cancelled by user at job ${d}`}).toEffect()}c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeStart,nodeName:m.name,nodeType:m.type}));let g=m.retry?.maxRetries??0,_=m.retry?.retryDelay??1e3,v=m.retry?.exponentialBackoff??!0,b=y(m),x=b?.enabled&&m.nodeTypeId&&p?p.getOrCreate(m.nodeTypeId,b):null;if(x){let{allowed:n,state:i,failureCount:s}=yield*x.allowRequest();if(!n){let n=x.getFallback();return yield*r.Effect.logWarning(`Circuit breaker OPEN for node type "${m.nodeTypeId}" - applying fallback`),n.type===`skip`?(c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeEnd,nodeName:m.name})),{nodeId:e,result:a[e],success:!0,waiting:!1}):n.type===`default`?(c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeEnd,nodeName:m.name,result:n.value})),{nodeId:e,result:n.value,success:!0,waiting:!1}):yield*t.n.fromCode(`CIRCUIT_BREAKER_OPEN`,{body:`Circuit breaker is open for node type "${m.name}"`,details:{nodeType:m.name,nodeId:e,state:i,failureCount:s}}).toEffect()}}let w=0,T=null;for(;w<=g;)try{let l,u={};if(m.type===`input`){if(l=a[e],l===void 0)return yield*r.Effect.logError(`Input node ${e} has no input data`),yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Input node ${e} has no input data`)}).toEffect()}else{if(u=C(e,s),Object.keys(u).length===0)return yield*r.Effect.logError(`Node ${e} has no input data`),yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${e} has no input data`)}).toEffect();if(m.multiInput)l=u;else{let n=Object.keys(u)[0];if(!n)return yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${e} has no input data`)}).toEffect();l=u[n]}}if(m.type===`conditional`&&!(yield*S(m,l)))return c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeEnd,nodeName:m.name})),{nodeId:e,result:l,success:!0,waiting:!1};let p=yield*m.run({data:l,inputs:u,jobId:d,flowId:o,storageId:i,clientId:f});if(p.type===`waiting`){let t=p.partialData;return c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodePause,nodeName:m.name,partialData:t})),{nodeId:e,result:t,success:!0,waiting:!0,nodeType:p.nodeType}}let g=p.data;if(E(e)&&(re(g)&&g.storage.id!==i&&(yield*r.Effect.logDebug(`Auto-persisting output node ${e} output from ${g.storage.id} to ${i}`),g=yield*A(g,i,f)),n.hooks?.onNodeOutput)){yield*r.Effect.logDebug(`Calling onNodeOutput hook for sink node ${e}`);let t=n.hooks.onNodeOutput({output:g,nodeId:e,flowId:o,jobId:d,storageId:i,clientId:f});g=yield*r.Effect.isEffect(t)?t:r.Effect.promise(()=>t)}return x&&(yield*x.recordSuccess()),c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeEnd,nodeName:m.name,result:g})),{nodeId:e,result:g,success:!0,waiting:!1,nodeType:p.nodeType}}catch(n){if(T=n instanceof t.n?n:t.n.fromCode(`FLOW_NODE_ERROR`,{cause:n}),x&&(yield*x.recordFailure(T.body)),w<g){w++;let t=v?_*2**(w-1):_;yield*r.Effect.logWarning(`Node ${e} (${m.name}) failed, retrying (${w}/${g}) after ${t}ms`),yield*r.Effect.sleep(t);continue}return c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeError,nodeName:m.name,error:T.body,retryCount:w})),yield*T.toEffect()}return T?yield*T.toEffect():yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Unexpected error in retry 
loop`)}).toEffect()}).pipe((()=>{let t=u.get(e),n=t?.nodeTypeId??t?.type??`unknown`;return r.Effect.withSpan(`node-${n}`,{attributes:{"node.id":e,"node.type":t?.type??`unknown`,"node.type_id":t?.nodeTypeId??`unknown`,"node.name":t?.name??`unknown`,"flow.id":o,"flow.job_id":d}})})()),M=({inputs:a,storageId:s,jobId:l,resumeFrom:d,clientId:m})=>r.Effect.gen(function*(){let g=yield*r.Effect.serviceOption(e.D),v=g._tag===`Some`?new p(g.value):null;!d&&c&&(yield*c({jobId:l,eventType:h.FlowStart,flowId:o}));let y=w(a||{}),b,S,C;d?(b=d.executionOrder,S=d.nodeResults,C=d.currentIndex):(b=x(),S=new Map,C=0);let T=new Map;if(b.length!==_.length)return yield*t.n.fromCode(`FLOW_CYCLE_ERROR`).toEffect();let O=new Map(_.map(e=>[e.id,e]));if(n.parallelExecution?.enabled??!1){yield*r.Effect.logDebug(`Flow ${o}: Executing in parallel mode (maxConcurrency: ${n.parallelExecution?.maxConcurrency??4})`);let e=new ne({maxConcurrency:n.parallelExecution?.maxConcurrency??4}),t=e.groupNodesByExecutionLevel(_,u);yield*r.Effect.logDebug(`Flow ${o}: Grouped nodes into ${t.length} execution levels`);let i={};_.forEach(e=>{i[e.id]=[]}),u.forEach(e=>{i[e.target]?.push(e.source)});for(let n of t){yield*r.Effect.logDebug(`Flow ${o}: Executing level ${n.level} with nodes: ${n.nodes.join(`, `)}`);let t=n.nodes.map(e=>()=>r.Effect.gen(function*(){if(d&&e===d.executionOrder[C]&&c){let t=O.get(e);t&&(yield*c({jobId:l,flowId:o,nodeId:e,eventType:h.NodeResume,nodeName:t.name,nodeType:t.type}))}return{nodeId:e,nodeResult:yield*j(e,s,y,S,O,l,m,v)}})),i=yield*e.executeNodesInParallel(t);for(let{nodeId:e,nodeResult:t}of i){if(t.waiting)return t.result!==void 0&&(S.set(e,t.result),t.nodeType&&T.set(e,t.nodeType)),{type:`paused`,nodeId:e,executionState:{executionOrder:b,currentIndex:b.indexOf(e),inputs:y}};t.success&&(S.set(e,t.result),t.nodeType&&T.set(e,t.nodeType))}}}else{yield*r.Effect.logDebug(`Flow ${o}: Executing in sequential mode`);for(let e=C;e<b.length;e++){let n=b[e];if(!n)return yield*t.n.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();if(d&&e===C&&c){let e=O.get(n);e&&(yield*c({jobId:l,flowId:o,nodeId:n,eventType:h.NodeResume,nodeName:e.name,nodeType:e.type}))}let r=yield*j(n,s,y,S,O,l,m,v);if(r.waiting)return r.result!==void 0&&(S.set(r.nodeId,r.result),r.nodeType&&T.set(r.nodeId,r.nodeType)),{type:`paused`,nodeId:r.nodeId,executionState:{executionOrder:b,currentIndex:e,inputs:y}};r.success&&(S.set(r.nodeId,r.result),r.nodeType&&T.set(r.nodeId,r.nodeType))}}let A=D(S),M=k(S,T),N=i.z.record(i.z.string(),f).safeParse(A);if(!N.success){let e=`Flow output validation failed: ${N.error.message}. Expected outputs: ${JSON.stringify(Object.keys(D(S)))}. 
Output nodes (sinks + keepOutput): ${_.filter(e=>E(e.id)).map(e=>e.id).join(`, `)}`;return c&&(yield*c({jobId:l,eventType:h.FlowError,flowId:o,error:e})),yield*t.n.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:e,cause:N.error}).toEffect()}let P=N.data;return c&&(yield*c({jobId:l,eventType:h.FlowEnd,flowId:o,outputs:M,result:P})),{type:`completed`,result:P,outputs:M}});return{id:o,name:s,nodes:_,edges:u,inputSchema:d,outputSchema:f,onEvent:c,checkJobStatus:l,hooks:n.hooks,run:({inputs:e,storageId:t,jobId:n,clientId:r})=>M({inputs:e,storageId:t,jobId:n,clientId:r}),resume:({jobId:e,storageId:t,nodeResults:n,executionState:r,clientId:i})=>M({inputs:r.inputs,storageId:t,jobId:e,resumeFrom:{executionOrder:r.executionOrder,nodeResults:new Map(Object.entries(n)),currentIndex:r.currentIndex},clientId:i}),validateTypes:()=>{let e=_;return v.validateFlow(e,u)},validateInputs:e=>v.validateData(e,d),validateOutputs:e=>v.validateData(e,f)}})}const G={enabled:!0,maxRetries:3,backoff:{type:`exponential`,initialDelayMs:1e3,maxDelayMs:3e5,multiplier:2,jitter:!0},ttlMs:6048e5};function K(e,t){switch(e.type){case`immediate`:return 0;case`fixed`:return e.delayMs;case`exponential`:{let n=e.initialDelayMs*e.multiplier**+t,r=Math.min(n,e.maxDelayMs);if(e.jitter){let e=.5+Math.random();return Math.floor(r*e)}return r}default:return 0}}function he(e,t){return!t.enabled||t.nonRetryableErrors?.includes(e)?!1:t.retryableErrors&&t.retryableErrors.length>0?t.retryableErrors.includes(e):!0}function ge(e,t){if(!(t===void 0||t<=0))return new Date(e.getTime()+t)}var q=class e extends r.Context.Tag(`DeadLetterQueueService`)(){static optional=r.Effect.serviceOption(e)};function _e(){return r.Effect.gen(function*(){let t=yield*e.A,n=()=>`dlq_${crypto.randomUUID()}`,i=e=>({...e,createdAt:new Date(e.createdAt),updatedAt:new Date(e.updatedAt),expiresAt:e.expiresAt?new Date(e.expiresAt):void 0,nextRetryAt:e.nextRetryAt?new Date(e.nextRetryAt):void 0,retryHistory:e.retryHistory.map(e=>({...e,attemptedAt:new Date(e.attemptedAt)}))}),a=()=>r.Effect.gen(function*(){if(!t.list)return[];let e=yield*t.list(),n=[];for(let a of e){let e=yield*r.Effect.catchAll(t.get(a),()=>r.Effect.succeed(null));e&&n.push(i(e))}return n});return{add:(e,i,a=G)=>r.Effect.gen(function*(){let r=n(),o=new Date,s={code:i.code||`UNKNOWN_ERROR`,message:i.body||i.message||`Unknown error`,nodeId:void 0,stack:i.stack},c=e.tasks.find(e=>e.status===`failed`);c&&(s.nodeId=c.nodeId);let l={};for(let t of e.tasks)t.result!==void 0&&(l[t.nodeId]=t.result);let u=he(s.code,a),d;if(a.enabled&&u&&a.maxRetries>0){let e=K(a.backoff,0);d=new Date(o.getTime()+e)}let f={id:r,jobId:e.id,flowId:e.flowId,storageId:e.storageId,clientId:e.clientId,error:s,inputs:e.executionState?.inputs||{},nodeResults:l,failedAtNodeId:s.nodeId,retryCount:0,maxRetries:a.maxRetries,nextRetryAt:d,retryHistory:[],createdAt:o,updatedAt:o,expiresAt:ge(o,a.ttlMs),status:u&&a.enabled?`pending`:`exhausted`};return yield*t.set(r,f),f}),get:e=>r.Effect.gen(function*(){return i(yield*t.get(e))}),getOption:e=>r.Effect.gen(function*(){let n=yield*r.Effect.either(t.get(e));return n._tag===`Left`?n.left.code===`FILE_NOT_FOUND`?r.Option.none():yield*r.Effect.fail(n.left):r.Option.some(i(n.right))}),delete:e=>t.delete(e),list:(e={})=>r.Effect.gen(function*(){let t=yield*a(),{status:n,flowId:r,clientId:i,limit:o=50,offset:s=0}=e,c=t;n&&(c=c.filter(e=>e.status===n)),r&&(c=c.filter(e=>e.flowId===r)),i&&(c=c.filter(e=>e.clientId===i)),c.sort((e,t)=>t.createdAt.getTime()-e.createdAt.getTime());let 
l=c.length;return{items:c.slice(s,s+o),total:l}}),update:(e,n)=>r.Effect.gen(function*(){let r={...i(yield*t.get(e)),...n,updatedAt:new Date};return yield*t.set(e,r),r}),markRetrying:e=>r.Effect.gen(function*(){let n={...i(yield*t.get(e)),status:`retrying`,updatedAt:new Date};return yield*t.set(e,n),n}),recordRetryFailure:(e,n,a)=>r.Effect.gen(function*(){let r=i(yield*t.get(e)),o=new Date,s=r.retryCount+1,c=[...r.retryHistory,{attemptedAt:o,error:n,durationMs:a}],l=`pending`,u;if(s>=r.maxRetries)l=`exhausted`,u=void 0;else{let e=K(G.backoff,s);u=new Date(o.getTime()+e)}let d={...r,retryCount:s,retryHistory:c,status:l,nextRetryAt:u,updatedAt:o};return yield*t.set(e,d),d}),markResolved:e=>r.Effect.gen(function*(){let n={...i(yield*t.get(e)),status:`resolved`,nextRetryAt:void 0,updatedAt:new Date};return yield*t.set(e,n),n}),getScheduledRetries:(e=100)=>r.Effect.gen(function*(){let t=yield*a(),n=new Date;return t.filter(e=>e.status===`pending`&&e.nextRetryAt&&e.nextRetryAt<=n).sort((e,t)=>(e.nextRetryAt?.getTime()||0)-(t.nextRetryAt?.getTime()||0)).slice(0,e)}),cleanup:(e={})=>r.Effect.gen(function*(){let n=yield*a(),{olderThan:i,status:o}=e,s=new Date,c=0;for(let e of n){let n=!1;e.expiresAt&&e.expiresAt<=s&&(n=!0),i&&e.createdAt<=i&&(o?n=e.status===o:(e.status===`exhausted`||e.status===`resolved`)&&(n=!0)),n&&(yield*r.Effect.catchAll(t.delete(e.id),()=>r.Effect.succeed(void 0)),c++)}return{deleted:c}}),getStats:()=>r.Effect.gen(function*(){let e=yield*a(),t={pending:0,retrying:0,exhausted:0,resolved:0},n={},r,i=0;for(let a of e)t[a.status]++,n[a.flowId]=(n[a.flowId]||0)+1,(!r||a.createdAt<r)&&(r=a.createdAt),i+=a.retryCount;let o=e.length>0?i/e.length:0;return{totalItems:e.length,byStatus:t,byFlow:n,oldestItem:r,averageRetryCount:o}})}})}const ve=r.Layer.effect(q,_e());var J=class e extends r.Context.Tag(`FlowWaitUntil`)(){static optional=r.Effect.serviceOption(e)},ye=class extends r.Context.Tag(`FlowProvider`)(){},be=class extends r.Context.Tag(`FlowServer`)(){};const xe=e=>typeof e==`object`&&!!e&&`id`in e,Se=e=>typeof e==`object`&&e&&`nodeId`in e&&`data`in e&&`timestamp`in e?e.data:e;function Ce(e,n,i){let a=t=>{let a=e=>r.Effect.gen(function*(){let n=yield*i.get(t);n&&(yield*i.set(t,{...n,...e,updatedAt:new Date}))});return o=>r.Effect.gen(function*(){switch(e.onEvent&&(yield*r.Effect.catchAll(e.onEvent(o),e=>(r.Effect.logError(`Original onEvent failed`,e),r.Effect.succeed({eventId:null})))),yield*n.emit(t,o),r.Effect.logInfo(`Updating job ${t} with event ${o.eventType}`),o.eventType){case h.FlowStart:yield*a({status:`running`});break;case h.FlowEnd:yield*r.Effect.gen(function*(){let e=yield*i.get(t);e&&o.outputs&&(yield*i.set(t,{...e,result:o.outputs,updatedAt:new Date}))});break;case h.FlowError:yield*a({status:`failed`,error:o.error});break;case h.NodeStart:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.find(e=>e.nodeId===o.nodeId)?e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`running`,updatedAt:new Date}:e):[...e.tasks,{nodeId:o.nodeId,status:`running`,createdAt:new Date,updatedAt:new Date}];yield*i.set(t,{...e,tasks:n,updatedAt:new Date})}});break;case h.NodePause:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.find(e=>e.nodeId===o.nodeId)?e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`paused`,result:o.partialData,updatedAt:new Date}:e):[...e.tasks,{nodeId:o.nodeId,status:`paused`,result:o.partialData,createdAt:new Date,updatedAt:new Date}];yield*i.set(t,{...e,tasks:n,updatedAt:new Date})}});break;case 
h.NodeResume:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`running`,updatedAt:new Date}:e);yield*i.set(t,{...e,tasks:n,updatedAt:new Date})}});break;case h.NodeEnd:yield*r.Effect.gen(function*(){let n=yield*i.get(t);if(n){let a=n.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`completed`,result:o.result,updatedAt:new Date}:e),s=!e.edges.some(e=>e.source===o.nodeId),c=e.nodes.find(e=>e.id===o.nodeId)?.keepOutput===!0,l=o.result,u=Se(l),d=n.intermediateFiles||[],f=s||c;f&&xe(u)&&u.id?(d=d.filter(e=>e!==u.id),c&&!s&&r.Effect.logInfo(`Preserving output from node ${o.nodeId} due to keepOutput flag`)):!f&&xe(u)&&u.id&&(d.includes(u.id)||d.push(u.id)),yield*i.set(t,{...n,tasks:a,intermediateFiles:d,updatedAt:new Date})}});break;case h.NodeError:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`failed`,error:o.error,retryCount:o.retryCount,updatedAt:new Date}:e);yield*i.set(t,{...e,tasks:n,error:o.error,updatedAt:new Date})}});break}return{eventId:t}})},o=e=>e=>r.Effect.gen(function*(){let n=yield*i.get(e);return n?n.status===`paused`?`paused`:n.status===`cancelled`?`cancelled`:`running`:yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}))});return{...e,run:t=>r.Effect.gen(function*(){let n=t.jobId||crypto.randomUUID(),r=a(n),i=o(n);return yield*(yield*W({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:r,checkJobStatus:i})).run({...t,jobId:n,clientId:t.clientId})}),resume:t=>r.Effect.gen(function*(){let n=t.jobId,r=a(n),i=o(n);return yield*(yield*W({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:r,checkJobStatus:i})).resume(t)})}}function we(){return r.Effect.gen(function*(){let i=yield*ye,a=yield*e.d,o=yield*e.j,s=yield*n.n,c=yield*q.optional,l=(e,n)=>r.Effect.gen(function*(){let i=yield*o.get(e);return i?yield*o.set(e,{...i,...n}):yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}))}),u=(e,t)=>r.Effect.gen(function*(){let n=yield*o.get(e);!n||!n.intermediateFiles||n.intermediateFiles.length===0||(yield*r.Effect.logInfo(`Cleaning up ${n.intermediateFiles.length} intermediate files for job ${e}`),yield*r.Effect.all(n.intermediateFiles.map(e=>r.Effect.gen(function*(){yield*s.delete(e,t),yield*r.Effect.logDebug(`Deleted intermediate file ${e}`)}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to delete intermediate file ${e}: ${t}`),r.Effect.succeed(void 0)})))),{concurrency:5}),yield*l(e,{intermediateFiles:[]}))}),d=(e,t)=>r.Effect.gen(function*(){if(r.Option.isNone(c)){yield*r.Effect.logDebug(`[FlowServer] DLQ not configured, skipping for job: ${e}`);return}let n=c.value,i=yield*r.Effect.catchAll(o.get(e),()=>r.Effect.succeed(null));if(!i){yield*r.Effect.logWarning(`[FlowServer] Job ${e} not found when adding to DLQ`);return}yield*r.Effect.catchAll(n.add(i,t),t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`[FlowServer] Failed to add job ${e} to DLQ`,t),r.Effect.succeed(void 0)})),yield*r.Effect.logInfo(`[FlowServer] Added job ${e} to Dead Letter Queue`)}),f=r.Effect.gen(function*(){let e=yield*r.Effect.currentSpan.pipe(r.Effect.option);return r.Option.match(e,{onNone:()=>void 
0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),p=({jobId:e,flow:n,storageId:i,clientId:s,inputs:c})=>r.Effect.gen(function*(){return console.log(`[FlowServer] executeFlowInBackground started for job: ${e}`),yield*l(e,{status:`running`,traceContext:yield*f}),yield*r.Effect.gen(function*(){console.log(`[FlowServer] Creating flowWithEvents for job: ${e}`);let t=Ce(n,a,o);console.log(`[FlowServer] Running flow for job: ${e}`);let r=yield*t.run({inputs:c,storageId:i,jobId:e,clientId:s});return console.log(`[FlowServer] Flow completed for job: ${e}, result type: ${r.type}`),r.type===`paused`?yield*l(e,{status:`paused`,pausedAt:r.nodeId,executionState:r.executionState,updatedAt:new Date}):(yield*l(e,{status:`completed`,updatedAt:new Date,endedAt:new Date}),yield*u(e,s)),r}).pipe(r.Effect.withSpan(`flow-execution`,{attributes:{"flow.id":n.id,"flow.name":n.name,"flow.job_id":e,"flow.storage_id":i,"flow.node_count":n.nodes.length}}))}).pipe(r.Effect.withSpan(`flow`,{attributes:{"flow.id":n.id,"flow.name":n.name,"flow.job_id":e,"flow.storage_id":i,"flow.node_count":n.nodes.length}}),r.Effect.catchAll(i=>r.Effect.gen(function*(){yield*r.Effect.logError(`Flow execution failed`,i);let c=i instanceof t.n?i.body:String(i);yield*r.Effect.logInfo(`Updating job ${e} to failed status with error: ${c}`),yield*l(e,{status:`failed`,error:c,updatedAt:new Date}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to update job ${e}`,t),r.Effect.succeed(void 0)})));let f=yield*o.get(e);throw f&&(yield*a.emit(e,{jobId:e,eventType:h.FlowError,flowId:f.flowId,error:c}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to emit FlowError event for job ${e}`,t),r.Effect.succeed(void 0)})))),n.onEvent&&(yield*n.onEvent({jobId:e,eventType:h.FlowError,flowId:n.id,error:c}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to call flow.onEvent for FlowError event for job ${e}`,t),r.Effect.succeed({eventId:null})})))),yield*u(e,s).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to cleanup intermediate files for job ${e}`,t),r.Effect.succeed(void 0)}))),yield*d(e,i instanceof t.n?i:new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(i),cause:i})),i})));return{getFlow:(e,t)=>r.Effect.gen(function*(){return yield*i.getFlow(e,t)}),getFlowData:(e,t)=>r.Effect.gen(function*(){return U(yield*i.getFlow(e,t))}),runFlow:({flowId:e,storageId:n,clientId:a,inputs:s})=>r.Effect.gen(function*(){let c=yield*J.optional,l=yield*r.Effect.try({try:()=>$.parse({inputs:s}),catch:e=>t.n.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:e})}),u=crypto.randomUUID(),d=new Date,f={id:u,flowId:e,storageId:n,clientId:a,status:`started`,createdAt:d,updatedAt:d,tasks:[]};yield*o.set(u,f);let m=yield*i.getFlow(e,a);console.log(`[FlowServer] About to fork flow execution for job: ${u}`);let h=p({jobId:u,flow:m,storageId:n,clientId:a,inputs:l.inputs}).pipe(r.Effect.tapErrorCause(e=>r.Effect.logError(`Flow execution failed`,e)));if(r.Option.isSome(c)){console.log(`[FlowServer] Using waitUntil for job: ${u}`);let e=yield*r.Effect.runtime(),t=r.Runtime.runPromise(e)(h);c.value(t)}else console.log(`[FlowServer] Using Effect.forkDaemon for job: ${u}`),yield*r.Effect.forkDaemon(h);return console.log(`[FlowServer] Flow execution started for job: 
${u}`),f}),getJobStatus:e=>r.Effect.gen(function*(){return(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`})))}),resumeFlow:({jobId:e,nodeId:n,newData:s,clientId:c})=>r.Effect.gen(function*(){let f=yield*J.optional,p=yield*o.get(e);if(!p)return console.error(`Job not found`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}));if(p.status!==`paused`)return console.error(`Job is not paused`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} is not paused (status: ${p.status})`}));if(p.pausedAt!==n)return console.error(`Job is not paused at the expected node`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} is paused at node ${p.pausedAt}, not ${n}`}));if(!p.executionState)return console.error(`Job has no execution state`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} has no execution state`}));let m={...p.tasks.reduce((e,t)=>(t.result!==void 0&&(e[t.nodeId]=t.result),e),{}),[n]:s},g={...p.executionState.inputs,[n]:s};yield*l(e,{status:`running`});let _=yield*i.getFlow(p.flowId,p.clientId),v=p.traceContext?r.Tracer.externalSpan({traceId:p.traceContext.traceId,spanId:p.traceContext.spanId,sampled:p.traceContext.traceFlags===1}):void 0,y=r.Effect.gen(function*(){let n=Ce(_,a,o);if(!p.executionState)return yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} has no execution state`}));let i=yield*n.resume({jobId:e,storageId:p.storageId,nodeResults:m,executionState:{...p.executionState,inputs:g},clientId:p.clientId});return i.type===`paused`?yield*l(e,{status:`paused`,pausedAt:i.nodeId,executionState:i.executionState,updatedAt:new Date}):(yield*l(e,{status:`completed`,pausedAt:void 0,executionState:void 0,updatedAt:new Date,endedAt:new Date}),yield*u(e,c)),i}).pipe(r.Effect.withSpan(`flow-execution-resume`,{attributes:{"flow.id":_.id,"flow.name":_.name,"flow.job_id":e,"flow.storage_id":p.storageId,"flow.resumed_from_node":n},parent:v})).pipe(r.Effect.catchAll(n=>r.Effect.gen(function*(){yield*r.Effect.logError(`Flow resume failed`,n);let i=n instanceof t.n?n.body:String(n);yield*r.Effect.logInfo(`Updating job ${e} to failed status with error: ${i}`),yield*l(e,{status:`failed`,error:i,updatedAt:new Date}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to update job ${e}`,t),r.Effect.succeed(void 0)})));let s=yield*o.get(e);throw s&&(yield*a.emit(e,{jobId:e,eventType:h.FlowError,flowId:s.flowId,error:i}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to emit FlowError event for job ${e}`,t),r.Effect.succeed(void 0)})))),yield*u(e,c).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to cleanup intermediate files for job ${e}`,t),r.Effect.succeed(void 0)}))),yield*d(e,n instanceof t.n?n:new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(n),cause:n})),n}))).pipe(r.Effect.tapErrorCause(e=>r.Effect.logError(`Flow resume failed`,e)));if(r.Option.isSome(f)){console.log(`[FlowServer] Using waitUntil for resume job: ${e}`);let t=yield*r.Effect.runtime(),n=r.Runtime.runPromise(t)(y);f.value(n)}else console.log(`[FlowServer] Using Effect.forkDaemon for resume job: ${e}`),yield*r.Effect.forkDaemon(y);return(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found after update`})))}),pauseFlow:(e,n)=>r.Effect.gen(function*(){let i=yield*o.get(e);if(!i)return 
yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}));if(n!==null&&i.clientId!==n)return yield*r.Effect.fail(t.n.fromCode(`FLOW_NOT_AUTHORIZED`,{cause:`Client ${n} is not authorized to pause job ${e}`}));if(i.status!==`running`)return yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} cannot be paused (current status: ${i.status})`}));let s=i.tasks.find(e=>e.status===`running`)?.nodeId;return yield*l(e,{status:`paused`,pausedAt:s,updatedAt:new Date}),yield*a.emit(e,{jobId:e,flowId:i.flowId,eventType:h.FlowPause,pausedAt:s}),(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found after pause`})))}),cancelFlow:(e,n)=>r.Effect.gen(function*(){let i=yield*o.get(e);return i?n!==null&&i.clientId!==n?yield*r.Effect.fail(t.n.fromCode(`FLOW_NOT_AUTHORIZED`,{cause:`Client ${n} is not authorized to cancel job ${e}`})):i.status!==`running`&&i.status!==`paused`&&i.status!==`started`?yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} cannot be cancelled (current status: ${i.status})`})):(yield*l(e,{status:`cancelled`,updatedAt:new Date,endedAt:new Date}),yield*a.emit(e,{jobId:e,flowId:i.flowId,eventType:h.FlowCancel}),yield*u(e,n),(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found after cancellation`})))):yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}))}),subscribeToFlowEvents:(e,t)=>r.Effect.gen(function*(){yield*a.subscribe(e,t)}),unsubscribeFromFlowEvents:e=>r.Effect.gen(function*(){yield*a.unsubscribe(e)})}})}const Te=r.Layer.effect(be,we());function Ee(e){let t=e.lastIndexOf(`.`);return t===-1||t===0?e:e.substring(0,t)}function De(e){let t=e.lastIndexOf(`.`);return t===-1||t===0?``:e.substring(t+1)}function Y(e,t,n){let r=e.metadata??{},i=r.fileName??r.originalName??r.name??`unnamed`;return{baseName:Ee(i),extension:De(i),fileName:i,nodeType:t.nodeType,nodeId:t.nodeId,flowId:t.flowId,jobId:t.jobId,timestamp:new Date().toISOString(),...n}}function Oe(e,t){try{let n={};for(let[e,r]of Object.entries(t))r!==void 0&&(n[e]=String(r));return(0,a.render)(e,n)}catch{return e}}function X(e,t,n){let r=t.fileName;if(!n)return r;try{if(n.mode===`auto`){if(n.autoSuffix){let e=n.autoSuffix(t);if(e){let{baseName:n,extension:r}=t;return r?`${n}-${e}.${r}`:`${n}-${e}`}}return r}if(n.mode===`custom`){if(n.rename)return n.rename(e,t)||r;if(n.pattern)return Oe(n.pattern,t)||r}return r}catch{return r}}function ke(e){if(!e||e.trim()===``)return{isValid:!1,error:`Pattern cannot be empty`};let t=(e.match(/\{\{/g)||[]).length,n=(e.match(/\}\}/g)||[]).length;if(t!==n)return{isValid:!1,error:`Unbalanced braces: ${t} opening, ${n} closing`};let r=e.match(/\{\{[^}]*[^a-zA-Z0-9_}][^}]*\}\}/g);return r?{isValid:!1,error:`Invalid variable syntax: ${r[0]}`}:{isValid:!0}}const Ae=[{name:`baseName`,description:`Filename without extension`,example:`photo`},{name:`extension`,description:`File extension without dot`,example:`jpg`},{name:`fileName`,description:`Full original filename`,example:`photo.jpg`},{name:`nodeType`,description:`Type of processing node`,example:`resize`},{name:`nodeId`,description:`Specific node instance ID`,example:`resize-1`},{name:`flowId`,description:`Flow identifier`,example:`flow-abc`},{name:`jobId`,description:`Execution job ID`,example:`job-123`},{name:`timestamp`,description:`ISO 8601 processing time`,example:`2024-01-15T10:30:00Z`},{name:`width`,description:`Output width 
(image/video)`,example:`800`},{name:`height`,description:`Output height (image/video)`,example:`600`},{name:`format`,description:`Output format`,example:`webp`},{name:`quality`,description:`Quality setting`,example:`80`},{name:`pageNumber`,description:`Page number (documents)`,example:`1`}];function Z(e){return!(`stream`in e)}function je({id:t,name:i,description:a,outputTypeId:o,keepOutput:s,naming:c,nodeType:l=`transform`,nodeTypeId:u,namingVars:d,circuitBreaker:f,mode:p=`auto`,streamingConfig:m,transform:h,streamingTransform:g}){if(p===`streaming`&&!g)throw Error(`Transform node "${t}": mode is "streaming" but no streamingTransform function provided`);if(p===`buffered`&&!h)throw Error(`Transform node "${t}": mode is "buffered" but no transform function provided`);if(p===`auto`&&!h&&!g)throw Error(`Transform node "${t}": mode is "auto" but neither transform nor streamingTransform provided`);let _={...e.b,...m};return r.Effect.gen(function*(){let m=yield*n.n;return yield*C({id:t,name:i,description:a,type:S.process,outputTypeId:o,keepOutput:s,nodeTypeId:u,circuitBreaker:f,inputSchema:e.E,outputSchema:e.E,run:({data:e,storageId:n,flowId:i,jobId:a,clientId:o})=>r.Effect.gen(function*(){let s={flowId:i,nodeId:t,jobId:a},u=yield*r.Effect.gen(function*(){if(p===`buffered`)return!1;if(p===`streaming`)return!0;let t=e.size??0,i=_.fileSizeThreshold;return t>0&&t<i?(yield*r.Effect.logDebug(`File ${e.id} (${t} bytes) below threshold (${i}), using buffered mode`),!1):g?(yield*m.getCapabilities(n,o)).supportsStreamingRead?(yield*r.Effect.logDebug(`File ${e.id} qualifies for streaming mode`),!0):(yield*r.Effect.logDebug(`DataStore doesn't support streaming read, using buffered mode`),!1):(yield*r.Effect.logDebug(`No streamingTransform function, using buffered mode`),!1)}),{type:f,fileName:v,metadata:y,metadataJson:b}=k(e.metadata);if(u&&g){yield*r.Effect.logDebug(`Using streaming transform for ${e.id}`);let u=yield*g(yield*m.readStream(e.id,o,_),e),p=Z(u)?u:u.stream,h=Z(u)?void 0:u.type,x=Z(u)?void 0:u.estimatedSize,S=Z(u)?void 0:u.fileName;!S&&c&&(S=X(e,Y(e,{flowId:i,jobId:a,nodeId:t,nodeType:l},d),c));let C=yield*m.getCapabilities(n,o),w;if(C.supportsStreamingWrite)yield*r.Effect.logDebug(`Using streaming write for ${e.id} - no intermediate buffering`),w=yield*m.uploadStream({storageId:n,uploadLengthDeferred:!0,sizeHint:x,type:h??f,fileName:S??v,lastModified:0,metadata:b,flow:s},o,p);else{yield*r.Effect.logDebug(`Falling back to buffered upload for ${e.id} (streaming write not supported)`);let t=[];yield*r.Stream.runForEach(p,e=>r.Effect.sync(()=>{t.push(e)}));let i=t.reduce((e,t)=>e+t.byteLength,0),a=new Uint8Array(i),c=0;for(let e of t)a.set(e,c),c+=e.byteLength;let l=new ReadableStream({start(e){e.enqueue(a),e.close()}});w=yield*m.upload({storageId:n,size:a.byteLength,type:h??f,fileName:S??v,lastModified:0,metadata:b,flow:s},o,l)}let E=y?{...y,...h&&{mimeType:h,type:h,"content-type":h},...S&&{fileName:S,originalName:S,name:S,extension:S.split(`.`).pop()||y.extension}}:w.metadata;return T(E?{...w,metadata:E}:w)}if(!h)throw Error(`Transform node "${t}": buffered mode selected but no transform function provided`);let x=yield*h(yield*m.read(e.id,o),e),S=x instanceof Uint8Array?x:x.bytes,C=x instanceof Uint8Array?void 0:x.type,w=x instanceof Uint8Array?void 0:x.fileName;!w&&c&&(w=X(e,Y(e,{flowId:i,jobId:a,nodeId:t,nodeType:l},d),c));let E=new 
ReadableStream({start(e){e.enqueue(S),e.close()}}),D=yield*m.upload({storageId:n,size:S.byteLength,type:C??f,fileName:w??v,lastModified:0,metadata:b,flow:s},o,E),O=y?{...y,...C&&{mimeType:C,type:C,"content-type":C},...w&&{fileName:w,originalName:w,name:w,extension:w.split(`.`).pop()||y.extension}}:D.metadata;return T(O?{...D,metadata:O}:D)})})})}var Me=class extends r.Context.Tag(`CredentialProvider`)(){},Ne=class extends r.Context.Tag(`DocumentAiPlugin`)(){},Pe=class extends r.Context.Tag(`DocumentPlugin`)(){},Fe=class extends r.Context.Tag(`ImageAiPlugin`)(){},Ie=class extends r.Context.Tag(`ImagePlugin`)(){};const Le=i.z.object({serviceType:i.z.enum([`replicate`]).optional()}),Re=i.z.object({duration:i.z.number().nonnegative(),width:i.z.number().positive(),height:i.z.number().positive(),codec:i.z.string(),format:i.z.string(),bitrate:i.z.number().nonnegative(),frameRate:i.z.number().positive(),aspectRatio:i.z.string(),hasAudio:i.z.boolean(),audioCodec:i.z.string().optional(),audioBitrate:i.z.number().nonnegative().optional(),size:i.z.number().nonnegative()}),ze=i.z.object({timestamp:i.z.number().nonnegative(),format:i.z.enum([`png`,`jpeg`]).optional(),quality:i.z.number().min(1).max(100).optional()}),Be=i.z.object({quality:i.z.number().min(0).max(100),format:i.z.enum([`jpeg`,`webp`,`png`,`avif`])}),Ve=i.z.object({serviceType:i.z.enum([`replicate`]).optional()}),He=i.z.object({width:i.z.number().positive().optional(),height:i.z.number().positive().optional(),fit:i.z.enum([`contain`,`cover`,`fill`])}).refine(e=>e.width||e.height,`Either width or height must be specified for resize`),Ue=i.z.object({width:i.z.number().positive().optional(),height:i.z.number().positive().optional(),aspectRatio:i.z.enum([`keep`,`ignore`]).optional(),scaling:i.z.enum([`bicubic`,`bilinear`,`lanczos`]).optional()}).refine(e=>e.width||e.height,`Either width or height must be specified for video 
resize`),We=i.z.object({format:i.z.enum([`mp4`,`webm`,`mov`,`avi`]),codec:i.z.enum([`h264`,`h265`,`vp9`,`av1`]).optional(),videoBitrate:i.z.string().optional(),audioBitrate:i.z.string().optional(),audioCodec:i.z.enum([`aac`,`mp3`,`opus`,`vorbis`]).optional()}),Ge=i.z.object({type:i.z.literal(`resize`),width:i.z.number().positive().optional(),height:i.z.number().positive().optional(),fit:i.z.enum([`contain`,`cover`,`fill`])}),Ke=i.z.object({type:i.z.literal(`blur`),sigma:i.z.number().min(.3).max(1e3)}),qe=i.z.object({type:i.z.literal(`rotate`),angle:i.z.number(),background:i.z.string().optional()}),Je=i.z.object({type:i.z.literal(`flip`),direction:i.z.enum([`horizontal`,`vertical`])}),Ye=i.z.object({type:i.z.literal(`grayscale`)}),Q=i.z.object({type:i.z.literal(`sepia`)}),Xe=i.z.object({type:i.z.literal(`brightness`),value:i.z.number().min(-100).max(100)}),Ze=i.z.object({type:i.z.literal(`contrast`),value:i.z.number().min(-100).max(100)}),Qe=i.z.object({type:i.z.literal(`sharpen`),sigma:i.z.number().positive().optional()}),$e=i.z.object({type:i.z.literal(`watermark`),imagePath:i.z.string().min(1).url(),position:i.z.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),opacity:i.z.number().min(0).max(1),offsetX:i.z.number().optional(),offsetY:i.z.number().optional()}),et=i.z.object({type:i.z.literal(`logo`),imagePath:i.z.string().min(1).url(),position:i.z.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),scale:i.z.number().min(.1).max(2),offsetX:i.z.number().optional(),offsetY:i.z.number().optional()}),tt=i.z.object({type:i.z.literal(`text`),text:i.z.string().min(1),position:i.z.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),fontSize:i.z.number().positive(),color:i.z.string().min(1),fontFamily:i.z.string().optional(),offsetX:i.z.number().optional(),offsetY:i.z.number().optional()}),nt=i.z.discriminatedUnion(`type`,[Ge,Ke,qe,Je,Ye,Q,Xe,Ze,Qe,$e,et,tt]),rt=i.z.object({transformations:i.z.array(nt).min(1)}),it=i.z.object({startTime:i.z.number().nonnegative(),endTime:i.z.number().positive().optional(),duration:i.z.number().positive().optional()}).refine(e=>!e.endTime||!e.duration,`Cannot specify both endTime and duration`).refine(e=>!e.endTime||e.endTime>e.startTime,`endTime must be greater than startTime`);var at=class extends r.Context.Tag(`VideoPlugin`)(){},ot=class extends r.Context.Tag(`VirusScanPlugin`)(){},st=class extends r.Context.Tag(`ZipPlugin`)(){};const ct=(e,t)=>{if(e.length===0)return t;let[n,...r]=e;return r.reduce((e,t)=>i.z.union([e,t]),n)};function lt(e){return r.Effect.gen(function*(){let n=Object.entries(e.nodes),a=e=>r.Effect.isEffect(e)?e:r.Effect.succeed(e),o=yield*r.Effect.forEach(n,([e,n])=>r.Effect.flatMap(a(n),n=>n.id===e?r.Effect.succeed([e,n]):r.Effect.fail(t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node key ${e} does not match node id ${n.id}`)})))),s=Object.fromEntries(o),c=o.map(([,e])=>e),l=o.filter(([,e])=>e.type===S.input).map(([,e])=>e.inputSchema),u=e.edges.map(e=>({source:s[e.source]?.id??e.source,target:s[e.target]?.id??e.target,sourcePort:e.sourcePort,targetPort:e.targetPort})),d=new Set(o.map(([e])=>s[e]?.id).filter(e=>e&&!u.some(t=>t.source===e))),f=o.filter(([,e])=>d.has(e.id)).map(([,e])=>e.outputSchema),p=e.inputSchema??ct(l,i.z.unknown()),m=e.outputSchema??ct(f,i.z.unknown());return 
yield*W({flowId:e.flowId,name:e.name,nodes:c,edges:u,inputSchema:p,outputSchema:m,typeChecker:e.typeChecker,onEvent:e.onEvent,parallelExecution:e.parallelExecution,hooks:e.hooks,circuitBreaker:e.circuitBreaker})})}const $=i.z.object({inputs:i.z.record(i.z.string(),i.z.any())});Object.defineProperty(exports,`$`,{enumerable:!0,get:function(){return he}}),Object.defineProperty(exports,`A`,{enumerable:!0,get:function(){return Fe}}),Object.defineProperty(exports,`At`,{enumerable:!0,get:function(){return C}}),Object.defineProperty(exports,`B`,{enumerable:!0,get:function(){return Oe}}),Object.defineProperty(exports,`Bt`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`C`,{enumerable:!0,get:function(){return He}}),Object.defineProperty(exports,`Ct`,{enumerable:!0,get:function(){return te}}),Object.defineProperty(exports,`D`,{enumerable:!0,get:function(){return Re}}),Object.defineProperty(exports,`Dt`,{enumerable:!0,get:function(){return T}}),Object.defineProperty(exports,`E`,{enumerable:!0,get:function(){return ze}}),Object.defineProperty(exports,`Et`,{enumerable:!0,get:function(){return k}}),Object.defineProperty(exports,`F`,{enumerable:!0,get:function(){return Ae}}),Object.defineProperty(exports,`Ft`,{enumerable:!0,get:function(){return g}}),Object.defineProperty(exports,`G`,{enumerable:!0,get:function(){return we}}),Object.defineProperty(exports,`Gt`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`H`,{enumerable:!0,get:function(){return ye}}),Object.defineProperty(exports,`Ht`,{enumerable:!0,get:function(){return u}}),Object.defineProperty(exports,`I`,{enumerable:!0,get:function(){return X}}),Object.defineProperty(exports,`It`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`J`,{enumerable:!0,get:function(){return _e}}),Object.defineProperty(exports,`K`,{enumerable:!0,get:function(){return Te}}),Object.defineProperty(exports,`Kt`,{enumerable:!0,get:function(){return o}}),Object.defineProperty(exports,`L`,{enumerable:!0,get:function(){return Y}}),Object.defineProperty(exports,`Lt`,{enumerable:!0,get:function(){return v}}),Object.defineProperty(exports,`M`,{enumerable:!0,get:function(){return Ne}}),Object.defineProperty(exports,`Mt`,{enumerable:!0,get:function(){return y}}),Object.defineProperty(exports,`N`,{enumerable:!0,get:function(){return Me}}),Object.defineProperty(exports,`Nt`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`O`,{enumerable:!0,get:function(){return Le}}),Object.defineProperty(exports,`Ot`,{enumerable:!0,get:function(){return E}}),Object.defineProperty(exports,`P`,{enumerable:!0,get:function(){return je}}),Object.defineProperty(exports,`Pt`,{enumerable:!0,get:function(){return x}}),Object.defineProperty(exports,`Q`,{enumerable:!0,get:function(){return ge}}),Object.defineProperty(exports,`R`,{enumerable:!0,get:function(){return Ee}}),Object.defineProperty(exports,`Rt`,{enumerable:!0,get:function(){return h}}),Object.defineProperty(exports,`S`,{enumerable:!0,get:function(){return Ue}}),Object.defineProperty(exports,`St`,{enumerable:!0,get:function(){return z}}),Object.defineProperty(exports,`T`,{enumerable:!0,get:function(){return Be}}),Object.defineProperty(exports,`Tt`,{enumerable:!0,get:function(){return P}}),Object.defineProperty(exports,`U`,{enumerable:!0,get:function(){return be}}),Object.defineProperty(exports,`Ut`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`V`,{enumerable:!0,get:function(){return 
ke}}),Object.defineProperty(exports,`Vt`,{enumerable:!0,get:function(){return p}}),Object.defineProperty(exports,`W`,{enumerable:!0,get:function(){return J}}),Object.defineProperty(exports,`Wt`,{enumerable:!0,get:function(){return l}}),Object.defineProperty(exports,`X`,{enumerable:!0,get:function(){return G}}),Object.defineProperty(exports,`Y`,{enumerable:!0,get:function(){return ve}}),Object.defineProperty(exports,`Z`,{enumerable:!0,get:function(){return K}}),Object.defineProperty(exports,`_`,{enumerable:!0,get:function(){return tt}}),Object.defineProperty(exports,`_t`,{enumerable:!0,get:function(){return L}}),Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return at}}),Object.defineProperty(exports,`at`,{enumerable:!0,get:function(){return le}}),Object.defineProperty(exports,`b`,{enumerable:!0,get:function(){return $e}}),Object.defineProperty(exports,`bt`,{enumerable:!0,get:function(){return R}}),Object.defineProperty(exports,`c`,{enumerable:!0,get:function(){return Xe}}),Object.defineProperty(exports,`ct`,{enumerable:!0,get:function(){return fe}}),Object.defineProperty(exports,`d`,{enumerable:!0,get:function(){return Ye}}),Object.defineProperty(exports,`dt`,{enumerable:!0,get:function(){return ae}}),Object.defineProperty(exports,`et`,{enumerable:!0,get:function(){return W}}),Object.defineProperty(exports,`f`,{enumerable:!0,get:function(){return et}}),Object.defineProperty(exports,`ft`,{enumerable:!0,get:function(){return ie}}),Object.defineProperty(exports,`g`,{enumerable:!0,get:function(){return Qe}}),Object.defineProperty(exports,`gt`,{enumerable:!0,get:function(){return ne}}),Object.defineProperty(exports,`h`,{enumerable:!0,get:function(){return Q}}),Object.defineProperty(exports,`ht`,{enumerable:!0,get:function(){return pe}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return ot}}),Object.defineProperty(exports,`it`,{enumerable:!0,get:function(){return ce}}),Object.defineProperty(exports,`j`,{enumerable:!0,get:function(){return Pe}}),Object.defineProperty(exports,`jt`,{enumerable:!0,get:function(){return w}}),Object.defineProperty(exports,`k`,{enumerable:!0,get:function(){return Ie}}),Object.defineProperty(exports,`kt`,{enumerable:!0,get:function(){return S}}),Object.defineProperty(exports,`l`,{enumerable:!0,get:function(){return Ze}}),Object.defineProperty(exports,`lt`,{enumerable:!0,get:function(){return oe}}),Object.defineProperty(exports,`m`,{enumerable:!0,get:function(){return qe}}),Object.defineProperty(exports,`mt`,{enumerable:!0,get:function(){return me}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return lt}}),Object.defineProperty(exports,`nt`,{enumerable:!0,get:function(){return V}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return it}}),Object.defineProperty(exports,`ot`,{enumerable:!0,get:function(){return se}}),Object.defineProperty(exports,`p`,{enumerable:!0,get:function(){return Ge}}),Object.defineProperty(exports,`pt`,{enumerable:!0,get:function(){return re}}),Object.defineProperty(exports,`q`,{enumerable:!0,get:function(){return q}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return st}}),Object.defineProperty(exports,`rt`,{enumerable:!0,get:function(){return H}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return Ke}}),Object.defineProperty(exports,`st`,{enumerable:!0,get:function(){return ue}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return $}}),Object.defineProperty(exports,`tt`,{enumerable:!0,get:function(){return 
U}}),Object.defineProperty(exports,`u`,{enumerable:!0,get:function(){return Je}}),Object.defineProperty(exports,`ut`,{enumerable:!0,get:function(){return de}}),Object.defineProperty(exports,`v`,{enumerable:!0,get:function(){return rt}}),Object.defineProperty(exports,`vt`,{enumerable:!0,get:function(){return I}}),Object.defineProperty(exports,`w`,{enumerable:!0,get:function(){return Ve}}),Object.defineProperty(exports,`wt`,{enumerable:!0,get:function(){return N}}),Object.defineProperty(exports,`x`,{enumerable:!0,get:function(){return We}}),Object.defineProperty(exports,`xt`,{enumerable:!0,get:function(){return B}}),Object.defineProperty(exports,`y`,{enumerable:!0,get:function(){return nt}}),Object.defineProperty(exports,`yt`,{enumerable:!0,get:function(){return F}}),Object.defineProperty(exports,`z`,{enumerable:!0,get:function(){return De}}),Object.defineProperty(exports,`zt`,{enumerable:!0,get:function(){return m}});
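Note on the bundled logic above: the dead-letter-queue store in this chunk (the recordRetryFailure / getScheduledRetries branch near the top) increments a retry counter, appends the failure to retryHistory, and either marks the item exhausted once maxRetries is reached or schedules nextRetryAt from a backoff curve; getScheduledRetries then returns due pending items ordered by that schedule. A minimal standalone sketch of that bookkeeping, assuming an exponential backoff with a cap; the type and function names here are illustrative, not the package's exports.

// Hypothetical re-creation of the DLQ retry bookkeeping visible in the bundle.
type DlqStatus = "pending" | "retrying" | "exhausted" | "resolved";

interface DlqItem {
  id: string;
  retryCount: number;
  maxRetries: number;
  retryHistory: { attemptedAt: Date; error: string; durationMs: number }[];
  status: DlqStatus;
  nextRetryAt?: Date;
  updatedAt: Date;
}

// Assumed backoff shape: exponential with a ceiling (the real curve comes from the
// configured backoff options, which are not readable from the minified output).
const backoffMs = (attempt: number, baseMs = 1_000, maxMs = 60_000): number =>
  Math.min(baseMs * 2 ** (attempt - 1), maxMs);

function recordRetryFailure(item: DlqItem, error: string, durationMs: number): DlqItem {
  const now = new Date();
  const retryCount = item.retryCount + 1;
  const retryHistory = [...item.retryHistory, { attemptedAt: now, error, durationMs }];

  if (retryCount >= item.maxRetries) {
    // No further attempts: drop the schedule and mark the item exhausted.
    return { ...item, retryCount, retryHistory, status: "exhausted", nextRetryAt: undefined, updatedAt: now };
  }
  // Otherwise schedule the next attempt from the backoff curve and keep it pending.
  return {
    ...item,
    retryCount,
    retryHistory,
    status: "pending",
    nextRetryAt: new Date(now.getTime() + backoffMs(retryCount)),
    updatedAt: now,
  };
}

// Mirrors getScheduledRetries: due pending items only, earliest schedule first.
const dueRetries = (items: DlqItem[], limit = 100): DlqItem[] =>
  items
    .filter((i) => i.status === "pending" && i.nextRetryAt && i.nextRetryAt <= new Date())
    .sort((a, b) => (a.nextRetryAt?.getTime() ?? 0) - (b.nextRetryAt?.getTime() ?? 0))
    .slice(0, limit);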
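The same chunk carries the file-naming helpers (src/flow/utils/file-naming in the source tree): extension splitting, a naming context built from file metadata plus node/flow/job identifiers, a {{variable}} pattern renderer with documented variables (baseName, extension, fileName, nodeType, nodeId, flowId, jobId, timestamp, width, height, format, quality, pageNumber), and a validator that rejects empty patterns and unbalanced braces. A standalone sketch of the rename path, assuming a plain mustache-style substitution in place of the bundled template renderer; the helper names are illustrative rather than the package's public API.

// Illustrative re-creation of the naming-context + pattern rendering seen in the bundle.
interface NamingContext {
  baseName: string;   // filename without extension, e.g. "photo"
  extension: string;  // extension without the dot, e.g. "jpg"
  fileName: string;   // full original filename, e.g. "photo.jpg"
  nodeType: string;
  nodeId: string;
  flowId: string;
  jobId: string;
  timestamp: string;  // ISO 8601 processing time
  [extra: string]: string | number | undefined; // namingVars such as width/height/format
}

const stripExtension = (name: string): string => {
  const i = name.lastIndexOf(".");
  return i <= 0 ? name : name.slice(0, i);
};

const getExtension = (name: string): string => {
  const i = name.lastIndexOf(".");
  return i <= 0 ? "" : name.slice(i + 1);
};

// Simple {{variable}} substitution; the bundle delegates to a template renderer and
// falls back to the raw pattern if rendering throws.
const renderPattern = (pattern: string, ctx: NamingContext): string =>
  pattern.replace(/\{\{([a-zA-Z0-9_]+)\}\}/g, (_, key: string) =>
    ctx[key] !== undefined ? String(ctx[key]) : "",
  );

// Example: rename the output of a resize node using the documented variables.
const ctx: NamingContext = {
  baseName: stripExtension("photo.jpg"),
  extension: getExtension("photo.jpg"),
  fileName: "photo.jpg",
  nodeType: "resize",
  nodeId: "resize-1",
  flowId: "flow-abc",
  jobId: "job-123",
  timestamp: new Date().toISOString(),
  width: 800,
  format: "webp",
};

console.log(renderPattern("{{baseName}}-{{width}}.{{format}}", ctx)); // "photo-800.webp"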
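Finally, the FlowServer service assembled in this chunk (the flow engine after the flow-server.ts → flow-engine.ts rename) exposes the job lifecycle: runFlow validates inputs, persists a started job record, and forks execution in the background, preferring a platform waitUntil hook when one is provided and otherwise using Effect.forkDaemon; getJobStatus reads the record back; pauseFlow, resumeFlow, and cancelFlow guard on the job's status and on the caller's clientId; intermediate files are cleaned up on completion, failure, or cancellation, and exhausted failures are handed to the dead letter queue. A hedged usage sketch, assuming the tag is exported as FlowServer from the flow entry point (the tag name visible in the bundle; the published export name and path may differ after the rename), with purely illustrative ids.

import { Effect } from "effect";
// Assumed import path/name: the bundle tags the service "FlowServer";
// the published export may differ after the flow-server -> flow-engine rename.
import { FlowServer } from "@uploadista/core/flow";

// Kick off a flow, read its status, and pause it; the method names and argument
// shapes follow the bundled implementation (runFlow, getJobStatus, pauseFlow, ...).
const program = Effect.gen(function* () {
  const server = yield* FlowServer;

  const job = yield* server.runFlow({
    flowId: "thumbnail-flow",
    storageId: "s3-primary",
    clientId: "client-42",
    inputs: { source: { fileName: "photo.jpg" } },
  });

  const status = yield* server.getJobStatus(job.id);
  yield* Effect.log(`job ${job.id} is ${status.status}`);

  // Pausing is only accepted while the job is running and the clientId matches.
  yield* server.pauseFlow(job.id, "client-42");
});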