@uploadista/core 0.0.6 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/README.md +1 -1
  2. package/dist/{checksum-wSBuXX84.cjs → checksum-CPiON71t.cjs} +1 -1
  3. package/dist/{checksum-C5qE-5eg.js → checksum-_Vagjoys.js} +2 -2
  4. package/dist/{checksum-C5qE-5eg.js.map → checksum-_Vagjoys.js.map} +1 -1
  5. package/dist/errors/index.cjs +1 -1
  6. package/dist/errors/index.d.cts +1 -1
  7. package/dist/errors/index.d.ts +1 -1
  8. package/dist/errors/index.js +1 -1
  9. package/dist/flow/index.cjs +1 -1
  10. package/dist/flow/index.d.cts +5 -5
  11. package/dist/flow/index.d.ts +5 -5
  12. package/dist/flow/index.js +1 -1
  13. package/dist/flow-CSHZVjcf.js +2 -0
  14. package/dist/flow-CSHZVjcf.js.map +1 -0
  15. package/dist/flow-zlCaikPS.cjs +1 -0
  16. package/dist/{index-B46hb7yB.d.cts → index-6oHIyVO9.d.cts} +2 -2
  17. package/dist/{index-B46hb7yB.d.cts.map → index-6oHIyVO9.d.cts.map} +1 -1
  18. package/dist/{index-YegzC0p1.d.ts → index-BOKqNaD_.d.ts} +70 -19
  19. package/dist/index-BOKqNaD_.d.ts.map +1 -0
  20. package/dist/{index-C1mxuUxK.d.ts → index-BQIgMrBX.d.ts} +2 -2
  21. package/dist/{index-C1mxuUxK.d.ts.map → index-BQIgMrBX.d.ts.map} +1 -1
  22. package/dist/{index-GLPiXqj4.d.cts → index-BswVyg4Z.d.cts} +2 -2
  23. package/dist/index-BswVyg4Z.d.cts.map +1 -0
  24. package/dist/{index-DMJv8Tvo.d.ts → index-Dv14pVwd.d.ts} +2 -2
  25. package/dist/index-Dv14pVwd.d.ts.map +1 -0
  26. package/dist/{index-0xq1cArb.d.cts → index-aQrRecmb.d.cts} +72 -21
  27. package/dist/index-aQrRecmb.d.cts.map +1 -0
  28. package/dist/index.cjs +1 -1
  29. package/dist/index.d.cts +5 -5
  30. package/dist/index.d.ts +5 -5
  31. package/dist/index.js +1 -1
  32. package/dist/{stream-limiter-CTuiXkcq.js → stream-limiter-CTLrikR_.js} +2 -2
  33. package/dist/{stream-limiter-CTuiXkcq.js.map → stream-limiter-CTLrikR_.js.map} +1 -1
  34. package/dist/{stream-limiter-DYGG4t9f.cjs → stream-limiter-CaCFrKY1.cjs} +1 -1
  35. package/dist/streams/index.cjs +1 -1
  36. package/dist/streams/index.d.cts +2 -2
  37. package/dist/streams/index.d.ts +2 -2
  38. package/dist/streams/index.js +1 -1
  39. package/dist/types/index.cjs +1 -1
  40. package/dist/types/index.d.cts +4 -4
  41. package/dist/types/index.d.ts +4 -4
  42. package/dist/types/index.js +1 -1
  43. package/dist/{types-Dj9g8ocl.cjs → types-BVbqP7yA.cjs} +1 -1
  44. package/dist/{types-m26wrG-Z.js → types-DqllXpuL.js} +2 -2
  45. package/dist/types-DqllXpuL.js.map +1 -0
  46. package/dist/upload/index.cjs +1 -1
  47. package/dist/upload/index.d.cts +4 -4
  48. package/dist/upload/index.d.ts +4 -4
  49. package/dist/upload/index.js +1 -1
  50. package/dist/{upload-BzU7ifyH.js → upload-C_n7Smfl.js} +2 -2
  51. package/dist/{upload-BzU7ifyH.js.map → upload-C_n7Smfl.js.map} +1 -1
  52. package/dist/{upload-DvLp6TXO.cjs → upload-kFnf82ds.cjs} +1 -1
  53. package/dist/{uploadista-error-D9SONF9K.d.ts → uploadista-error-B4dn0Ch6.d.cts} +3 -3
  54. package/dist/uploadista-error-B4dn0Ch6.d.cts.map +1 -0
  55. package/dist/{uploadista-error-DdTP-Rjx.cjs → uploadista-error-BOHJtDRc.cjs} +2 -0
  56. package/dist/{uploadista-error-CjfcFnVa.js → uploadista-error-CDkJ_Vrc.js} +3 -1
  57. package/dist/uploadista-error-CDkJ_Vrc.js.map +1 -0
  58. package/dist/{uploadista-error-CAtkQiAv.d.cts → uploadista-error-kZCQLC_U.d.ts} +3 -3
  59. package/dist/uploadista-error-kZCQLC_U.d.ts.map +1 -0
  60. package/dist/utils/index.cjs +1 -1
  61. package/dist/utils/index.d.cts +2 -2
  62. package/dist/utils/index.d.ts +2 -2
  63. package/dist/utils/index.js +1 -1
  64. package/dist/{utils-BILytQlb.js → utils-B5sYo1z9.js} +2 -2
  65. package/dist/{utils-BILytQlb.js.map → utils-B5sYo1z9.js.map} +1 -1
  66. package/dist/{utils-BLsIUd8c.cjs → utils-BbLQplqQ.cjs} +1 -1
  67. package/package.json +2 -2
  68. package/src/errors/uploadista-error.ts +10 -0
  69. package/src/flow/event.ts +33 -0
  70. package/src/flow/flow-server.ts +177 -6
  71. package/src/flow/flow.ts +24 -0
  72. package/src/flow/types/flow-job.ts +6 -4
  73. package/src/flow/types/flow-types.ts +3 -0
  74. package/src/types/data-store.ts +1 -1
  75. package/dist/flow-B0mMJM5Y.js +0 -2
  76. package/dist/flow-B0mMJM5Y.js.map +0 -1
  77. package/dist/flow-s5bgJsdb.cjs +0 -1
  78. package/dist/index-0xq1cArb.d.cts.map +0 -1
  79. package/dist/index-DMJv8Tvo.d.ts.map +0 -1
  80. package/dist/index-GLPiXqj4.d.cts.map +0 -1
  81. package/dist/index-YegzC0p1.d.ts.map +0 -1
  82. package/dist/types-m26wrG-Z.js.map +0 -1
  83. package/dist/uploadista-error-CAtkQiAv.d.cts.map +0 -1
  84. package/dist/uploadista-error-CjfcFnVa.js.map +0 -1
  85. package/dist/uploadista-error-D9SONF9K.d.ts.map +0 -1
package/src/flow/flow-server.ts CHANGED
@@ -95,7 +95,9 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  * @property getFlow - Retrieves a flow definition by ID
  * @property getFlowData - Retrieves flow metadata (nodes, edges) without full flow instance
  * @property runFlow - Starts a new flow execution and returns immediately with job ID
- * @property continueFlow - Resumes a paused flow with new data for a specific node
+ * @property resumeFlow - Resumes a paused flow with new data for a specific node
+ * @property pauseFlow - Pauses a running flow (user-initiated pause)
+ * @property cancelFlow - Cancels a running or paused flow and cleans up resources
  * @property getJobStatus - Retrieves current status and results of a flow job
  * @property subscribeToFlowEvents - Subscribes WebSocket to flow execution events
  * @property unsubscribeFromFlowEvents - Unsubscribes from flow events
@@ -121,7 +123,10 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  *
  * // Poll for status
  * const status = yield* server.getJobStatus(job.id);
- * console.log(status.status); // "running", "paused", "completed", or "failed"
+ * console.log(status.status); // "running", "paused", "completed", "failed", or "cancelled"
+ *
+ * // User can pause the flow
+ * yield* server.pauseFlow(job.id, "client123");
  *
  * return job;
  * });
@@ -131,7 +136,7 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  * const server = yield* FlowServer;
  *
  * // Flow paused waiting for user input at node "approval_1"
- * const job = yield* server.continueFlow({
+ * const job = yield* server.resumeFlow({
  * jobId: "job123",
  * nodeId: "approval_1",
  * newData: { approved: true },
@@ -141,6 +146,16 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  * return job;
  * });
  *
+ * // Cancel a flow
+ * const cancel = Effect.gen(function* () {
+ * const server = yield* FlowServer;
+ *
+ * // Cancel flow and cleanup intermediate files
+ * const job = yield* server.cancelFlow("job123", "client123");
+ *
+ * return job;
+ * });
+ *
  * // Check flow structure before execution
  * const inspect = Effect.gen(function* () {
  * const server = yield* FlowServer;
@@ -176,7 +191,7 @@ export type FlowServerShape = {
     inputs: any;
   }) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;
 
-  continueFlow: <TRequirements>({
+  resumeFlow: <TRequirements>({
     jobId,
     nodeId,
     newData,
@@ -188,6 +203,16 @@ export type FlowServerShape = {
     clientId: string | null;
   }) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;
 
+  pauseFlow: (
+    jobId: string,
+    clientId: string | null,
+  ) => Effect.Effect<FlowJob, UploadistaError>;
+
+  cancelFlow: (
+    jobId: string,
+    clientId: string | null,
+  ) => Effect.Effect<FlowJob, UploadistaError>;
+
   getJobStatus: (jobId: string) => Effect.Effect<FlowJob, UploadistaError>;
 
   subscribeToFlowEvents: (
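Together these additions give callers explicit pause/cancel control over a running job. A minimal sketch of driving the new surface from application code (the import path and the client/job identifiers are assumptions, not taken from the package):

```ts
import { Effect } from "effect";
import { FlowServer } from "@uploadista/core/flow"; // assumed entry point

// Pause a running job, inspect it, then cancel it through the FlowServer service.
const controlJob = (jobId: string, clientId: string) =>
  Effect.gen(function* () {
    const server = yield* FlowServer;

    // User-initiated pause; fails unless the job is currently "running".
    yield* server.pauseFlow(jobId, clientId);

    // The paused job records the node it stopped at.
    const paused = yield* server.getJobStatus(jobId);

    if (paused.status === "paused") {
      // Cancelling a paused (or running) job also cleans up intermediate files.
      yield* server.cancelFlow(jobId, clientId);
    }

    return yield* server.getJobStatus(jobId);
  });
```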
@@ -498,6 +523,25 @@ function withFlowEvents<
  });
  };
 
+  // Create checkJobStatus callback that reads from KV store
+  const createCheckJobStatusCallback = (executionJobId: string) => {
+    return (jobId: string) =>
+      Effect.gen(function* () {
+        const job = yield* kvStore.get(jobId);
+        if (!job) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_JOB_NOT_FOUND", {
+              cause: `Job ${jobId} not found`,
+            }),
+          );
+        }
+        // Return only the statuses we care about for flow control
+        if (job.status === "paused") return "paused" as const;
+        if (job.status === "cancelled") return "cancelled" as const;
+        return "running" as const;
+      });
+  };
+
  return {
  ...flow,
  run: (args: {
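The callback built above simply maps the persisted job status onto the three control values the engine understands. Outside of `withFlowEvents`, any store can satisfy the same contract; a sketch against an in-memory `Map` standing in for the KV store (the `UploadistaError` import path is an assumption):

```ts
import { Effect } from "effect";
import { UploadistaError } from "@uploadista/core/errors"; // assumed entry point

// Stand-in for the job KV store used by the real server.
const jobStatuses = new Map<string, "running" | "paused" | "cancelled">();

// Same contract as FlowConfig["checkJobStatus"]: succeed with one of the three
// control statuses, or fail with FLOW_JOB_NOT_FOUND for an unknown job.
const checkJobStatus = (jobId: string) =>
  Effect.gen(function* () {
    const status = jobStatuses.get(jobId);
    if (!status) {
      return yield* Effect.fail(
        UploadistaError.fromCode("FLOW_JOB_NOT_FOUND", {
          cause: `Job ${jobId} not found`,
        }),
      );
    }
    return status;
  });
```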
@@ -511,6 +555,7 @@ function withFlowEvents<
       const executionJobId = args.jobId || crypto.randomUUID();
 
       const onEventCallback = createOnEventCallback(executionJobId);
+      const checkJobStatusCallback = createCheckJobStatusCallback(executionJobId);
 
       // Create a new flow with the same configuration but with onEvent callback
       const flowWithEvents = yield* createFlowWithSchema({
@@ -521,6 +566,7 @@ function withFlowEvents<
         inputSchema: flow.inputSchema,
         outputSchema: flow.outputSchema,
         onEvent: onEventCallback,
+        checkJobStatus: checkJobStatusCallback,
       });
 
       // Run the enhanced flow with consistent jobId
@@ -549,6 +595,7 @@ function withFlowEvents<
       const executionJobId = args.jobId;
 
       const onEventCallback = createOnEventCallback(executionJobId);
+      const checkJobStatusCallback = createCheckJobStatusCallback(executionJobId);
 
       // Create a new flow with the same configuration but with onEvent callback
       const flowWithEvents = yield* createFlowWithSchema({
@@ -559,6 +606,7 @@ function withFlowEvents<
         inputSchema: flow.inputSchema,
         outputSchema: flow.outputSchema,
         onEvent: onEventCallback,
+        checkJobStatus: checkJobStatusCallback,
       });
 
       // Resume the enhanced flow
@@ -845,7 +893,7 @@ export function createFlowServer() {
        return job;
      }),
 
-    continueFlow: ({
+    resumeFlow: ({
      jobId,
      nodeId,
      newData,
@@ -857,7 +905,6 @@ export function createFlowServer() {
      clientId: string | null;
    }) =>
      Effect.gen(function* () {
-        console.log("continueFlow", jobId, nodeId, newData);
        // Get the current job
        const job = yield* kvStore.get(jobId);
        if (!job) {
@@ -1071,6 +1118,130 @@ export function createFlowServer() {
        return updatedJob;
      }),
 
+    pauseFlow: (jobId: string, clientId: string | null) =>
+      Effect.gen(function* () {
+        // Get the current job
+        const job = yield* kvStore.get(jobId);
+        if (!job) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_JOB_NOT_FOUND", {
+              cause: `Job ${jobId} not found`,
+            }),
+          );
+        }
+
+        // Verify authorization if clientId is provided
+        if (clientId !== null && job.clientId !== clientId) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_NOT_AUTHORIZED", {
+              cause: `Client ${clientId} is not authorized to pause job ${jobId}`,
+            }),
+          );
+        }
+
+        // Verify job can be paused (must be running)
+        if (job.status !== "running") {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_JOB_ERROR", {
+              cause: `Job ${jobId} cannot be paused (current status: ${job.status})`,
+            }),
+          );
+        }
+
+        // Find the currently running node (if any)
+        const runningTask = job.tasks.find((t) => t.status === "running");
+        const pausedAtNode = runningTask?.nodeId;
+
+        // Update job status to paused
+        yield* updateJob(jobId, {
+          status: "paused",
+          pausedAt: pausedAtNode,
+          updatedAt: new Date(),
+        });
+
+        // Emit FlowPause event
+        yield* eventEmitter.emit(jobId, {
+          jobId,
+          flowId: job.flowId,
+          eventType: EventType.FlowPause,
+          pausedAt: pausedAtNode,
+        });
+
+        // Return updated job
+        const updatedJob = yield* kvStore.get(jobId);
+        if (!updatedJob) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_JOB_NOT_FOUND", {
+              cause: `Job ${jobId} not found after pause`,
+            }),
+          );
+        }
+        return updatedJob;
+      }),
+
+    cancelFlow: (jobId: string, clientId: string | null) =>
+      Effect.gen(function* () {
+        // Get the current job
+        const job = yield* kvStore.get(jobId);
+        if (!job) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_JOB_NOT_FOUND", {
+              cause: `Job ${jobId} not found`,
+            }),
+          );
+        }
+
+        // Verify authorization if clientId is provided
+        if (clientId !== null && job.clientId !== clientId) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_NOT_AUTHORIZED", {
+              cause: `Client ${clientId} is not authorized to cancel job ${jobId}`,
+            }),
+          );
+        }
+
+        // Verify job can be cancelled (must be running or paused)
+        if (
+          job.status !== "running" &&
+          job.status !== "paused" &&
+          job.status !== "started"
+        ) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_JOB_ERROR", {
+              cause: `Job ${jobId} cannot be cancelled (current status: ${job.status})`,
+            }),
+          );
+        }
+
+        // Update job status to cancelled
+        yield* updateJob(jobId, {
+          status: "cancelled",
+          updatedAt: new Date(),
+          endedAt: new Date(),
+        });
+
+        // Emit FlowCancel event
+        yield* eventEmitter.emit(jobId, {
+          jobId,
+          flowId: job.flowId,
+          eventType: EventType.FlowCancel,
+        });
+
+        // Cleanup intermediate files
+        yield* cleanupIntermediateFiles(jobId, clientId);
+
+        // Return updated job
+        const updatedJob = yield* kvStore.get(jobId);
+        if (!updatedJob) {
+          return yield* Effect.fail(
+            UploadistaError.fromCode("FLOW_JOB_NOT_FOUND", {
+              cause: `Job ${jobId} not found after cancellation`,
+            }),
+          );
+        }
+        return updatedJob;
+      }),
+
    subscribeToFlowEvents: (jobId: string, connection: WebSocketConnection) =>
      Effect.gen(function* () {
        yield* eventEmitter.subscribe(jobId, connection);
package/src/flow/flow.ts CHANGED
@@ -163,6 +163,11 @@ export type Flow<
    TFlowOutputSchema,
    TRequirements
  >["onEvent"];
+  checkJobStatus?: FlowConfig<
+    TFlowInputSchema,
+    TFlowOutputSchema,
+    TRequirements
+  >["checkJobStatus"];
  run: (args: {
    inputs?: Record<string, z.infer<TFlowInputSchema>>;
    storageId: string;
@@ -298,6 +303,7 @@ export function createFlowWithSchema<
    flowId,
    name,
    onEvent,
+   checkJobStatus,
    edges,
    inputSchema,
    outputSchema,
@@ -473,6 +479,23 @@ export function createFlowWithSchema<
        ).toEffect();
      }
 
+      // Check job status before executing node
+      if (checkJobStatus) {
+        const status = yield* checkJobStatus(jobId);
+        if (status === "paused") {
+          // Flow was paused by user - stop execution gracefully
+          return yield* UploadistaError.fromCode("FLOW_PAUSED", {
+            cause: `Flow ${flowId} was paused by user at job ${jobId}`,
+          }).toEffect();
+        }
+        if (status === "cancelled") {
+          // Flow was cancelled by user - stop execution
+          return yield* UploadistaError.fromCode("FLOW_CANCELLED", {
+            cause: `Flow ${flowId} was cancelled by user at job ${jobId}`,
+          }).toEffect();
+        }
+      }
+
      // Emit NodeStart event if provided
      if (onEvent) {
        yield* onEvent({
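Because this status check runs before every node, a pause or cancel issued through the server surfaces to a direct `flow.run` caller as a failed Effect carrying the new FLOW_PAUSED or FLOW_CANCELLED code. A hedged sketch of absorbing that at a call site (the import path is assumed; `body` is the detail string an UploadistaError carries):

```ts
import { Effect } from "effect";
import { UploadistaError } from "@uploadista/core/errors"; // assumed entry point

// Treat a user-initiated stop as a normal outcome rather than a crash.
const absorbUserStops = <A>(run: Effect.Effect<A, UploadistaError>) =>
  run.pipe(
    Effect.catchAll((error) =>
      // error.body holds the detail, e.g. "Flow ... was paused by user at job ..."
      Effect.logInfo(`Flow stopped early: ${error.body}`),
    ),
  );
```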
@@ -1040,6 +1063,7 @@ export function createFlowWithSchema<
    inputSchema,
    outputSchema,
    onEvent,
+   checkJobStatus,
    run,
    resume,
    validateTypes,
package/src/flow/types/flow-job.ts CHANGED
@@ -91,7 +91,7 @@ export type FlowJobTask = {
  * console.log("Final result:", status.result);
  * } else if (status.status === "paused") {
  * // Resume with additional data
- * yield* flowServer.continueFlow({
+ * yield* flowServer.resumeFlow({
  * jobId: job.id,
  * nodeId: status.pausedAt,
  * newData: additionalChunk
@@ -126,8 +126,9 @@ export type FlowJob = {
 /**
  * Overall status of a flow job.
  *
- * Job lifecycle: started → running → (completed | failed)
- * Or with pauses: started → running → paused → running → (completed | failed)
+ * Job lifecycle: started → running → (completed | failed | cancelled)
+ * Or with pauses: started → running → paused → running → (completed | failed | cancelled)
+ * User actions: running → paused (via pauseFlow) or running → cancelled (via cancelFlow)
  */
 export type FlowJobStatus =
   | "pending"
@@ -135,4 +136,5 @@ export type FlowJobStatus =
   | "completed"
   | "failed"
   | "started"
-  | "paused";
+  | "paused"
+  | "cancelled";
package/src/flow/types/flow-types.ts CHANGED
@@ -342,6 +342,9 @@ export type FlowConfig<
   onEvent?: (
     event: FlowEvent,
   ) => Effect.Effect<{ eventId: string | null }, UploadistaError>;
+  checkJobStatus?: (
+    jobId: string,
+  ) => Effect.Effect<"running" | "paused" | "cancelled", UploadistaError>;
   parallelExecution?: {
     enabled?: boolean;
     maxConcurrency?: number;
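At the configuration level the hook is optional and sits next to the existing options. A sketch of the relevant FlowConfig slice (the always-"running" status source is a placeholder; a real flow would read the job's persisted state):

```ts
import { Effect } from "effect";

// Only the fields relevant to cooperative pause/cancel are shown here.
const configSlice = {
  checkJobStatus: (_jobId: string) =>
    Effect.succeed<"running" | "paused" | "cancelled">("running"),
  parallelExecution: {
    enabled: true,
    maxConcurrency: 4,
  },
};
```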
package/src/types/data-store.ts CHANGED
@@ -273,7 +273,7 @@ export class UploadFileDataStores extends Context.Tag("UploadFileDataStores")<
  * // Effect that creates a store
  * const config: DataStoreConfig = Effect.gen(function* () {
  * const kvStore = yield* UploadFileKVStore;
- * return createS3Store(kvStore);
+ * return s3Store(kvStore);
  * });
  *
  * // Pre-built Layer
package/dist/flow-B0mMJM5Y.js DELETED
@@ -1,2 +0,0 @@
- import{n as e}from"./uploadista-error-CjfcFnVa.js";import{c as t,g as n,i as r}from"./types-m26wrG-Z.js";import{a as i,n as a,o}from"./upload-BzU7ifyH.js";import{Context as s,Effect as c,Layer as l}from"effect";import{z as u}from"zod";function d({source:e,target:t,sourcePort:n,targetPort:r}){return{source:e,target:t,sourcePort:n,targetPort:r}}let f=function(e){return e.JobStart=`job-start`,e.JobEnd=`job-end`,e.FlowStart=`flow-start`,e.FlowEnd=`flow-end`,e.FlowError=`flow-error`,e.NodeStart=`node-start`,e.NodeEnd=`node-end`,e.NodePause=`node-pause`,e.NodeResume=`node-resume`,e.NodeError=`node-error`,e.NodeStream=`node-stream`,e.NodeResponse=`node-response`,e}({}),p=function(e){return e.input=`input`,e.process=`process`,e.output=`output`,e.conditional=`conditional`,e.multiplex=`multiplex`,e.merge=`merge`,e}({});function m({id:t,name:n,description:r,type:i,inputSchema:a,outputSchema:o,run:s,condition:l,multiInput:u=!1,multiOutput:d=!1,pausable:f=!1,retry:p}){return c.succeed({id:t,name:n,description:r,type:i,inputSchema:a,outputSchema:o,pausable:f,run:({data:r,jobId:i,flowId:l,storageId:u,clientId:d})=>c.gen(function*(){let f=yield*s({data:yield*c.try({try:()=>a.parse(r),catch:r=>{let i=r instanceof Error?r.message:String(r);return e.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${t}) input validation failed: ${i}`,cause:r})}}),jobId:i,storageId:u,flowId:l,clientId:d});return f.type===`waiting`?f:{type:`complete`,data:yield*c.try({try:()=>o.parse(f.data),catch:r=>{let i=r instanceof Error?r.message:String(r);return e.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${t}) output validation failed: ${i}`,cause:r})}})}}),condition:l,multiInput:u,multiOutput:d,retry:p})}const h=e=>({id:e.id,name:e.name,description:e.description,type:e.type});var g=class{maxConcurrency;constructor(e={}){this.maxConcurrency=e.maxConcurrency??4}groupNodesByExecutionLevel(e,t){let n={},r={};e.forEach(e=>{n[e.id]=[],r[e.id]=0}),t.forEach(e=>{n[e.source]?.push(e.target),r[e.target]=(r[e.target]||0)+1});let i=[],a=new Set,o=0;for(;a.size<e.length;){let e=Object.keys(r).filter(e=>r[e]===0&&!a.has(e));if(e.length===0)throw Error(`Cycle detected in flow graph - cannot execute in parallel`);i.push({level:o++,nodes:e}),e.forEach(e=>{a.add(e),delete r[e],n[e]?.forEach(e=>{r[e]!==void 0&&r[e]--})})}return i}executeNodesInParallel(e){return c.all(e.map(e=>e()),{concurrency:this.maxConcurrency})}canExecuteInParallel(e,t,n){return e.every(e=>(n[e]||[]).every(e=>t.has(e)))}getStats(){return{maxConcurrency:this.maxConcurrency}}};const _=(e,t)=>{if(e===t)return!0;try{return!!(e&&t&&typeof e==`object`&&typeof t==`object`)}catch{return!0}};var v=class{typeChecker;constructor(e=_){this.typeChecker=e}validateConnection(e,t,n){return this.getCompatibleTypes(e.outputSchema,t.inputSchema)}getCompatibleTypes(e,t){return this.typeChecker(e,t)}validateFlow(e,t){let n=[],r=new Map(e.map(e=>[e.id,e]));for(let e of t){let t=r.get(e.source),i=r.get(e.target);if(!t){n.push(`Source node ${e.source} not found`);continue}if(!i){n.push(`Target node ${e.target} not found`);continue}this.validateConnection(t,i,e)||n.push(`Schema mismatch: ${t.id} output schema incompatible with ${i.id} input schema`)}return{isValid:n.length===0,errors:n}}getExpectedInputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.target===e){let e=r.get(t.source);if(e){let n=t.sourcePort||t.source;i[n]=e.outputSchema}}return i}getActualOutputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.source===e){let 
e=r.get(t.target);if(e){let n=t.targetPort||t.target;i[n]=e.inputSchema}}return i}validateData(e,t){try{return t.parse(e),{isValid:!0,errors:[]}}catch(e){return e instanceof Error&&`errors`in e?{isValid:!1,errors:e.errors.map(e=>`${e.path.join(`.`)}: ${e.message}`)}:{isValid:!1,errors:[e instanceof Error?e.message:`Validation failed`]}}}};const y=e=>({id:e.id,name:e.name,nodes:e.nodes.map(h),edges:e.edges});function b(t){return c.gen(function*(){let n=yield*c.all(t.nodes.map(e=>c.isEffect(e)?e:c.succeed(e))),{flowId:r,name:i,onEvent:a,edges:o,inputSchema:s,outputSchema:l,typeChecker:d}=t,p=n,m=new v(d),h=()=>{let e={},t={},n={};return p.forEach(r=>{e[r.id]=[],n[r.id]=[],t[r.id]=0}),o.forEach(r=>{e[r.source]?.push(r.target),n[r.target]?.push(r.source),t[r.target]=(t[r.target]||0)+1}),{graph:e,reverseGraph:n,inDegree:t}},_=()=>{let{graph:e,inDegree:t}=h(),n=[],r=[];for(Object.keys(t).forEach(e=>{t[e]===0&&n.push(e)});n.length>0;){let i=n.shift();if(!i)throw Error(`No current node found`);r.push(i),e[i]?.forEach(e=>{t[e]=(t[e]||0)-1,t[e]===0&&n.push(e)})}return r},y=(e,t)=>{if(!e.condition)return c.succeed(!0);let{field:n,operator:r,value:i}=e.condition,a=t,o=a?.metadata?.[n]||a?.[n],s=(()=>{switch(r){case`equals`:return o===i;case`notEquals`:return o!==i;case`greaterThan`:return Number(o)>Number(i);case`lessThan`:return Number(o)<Number(i);case`contains`:return String(o).includes(String(i));case`startsWith`:return String(o).startsWith(String(i));default:return!0}})();return c.succeed(s)},b=(e,t)=>{let{reverseGraph:n}=h(),r=n[e]||[],i={};return r.forEach(e=>{let n=t.get(e);n!==void 0&&(i[e]=n)}),i},x=e=>{let t=p.filter(e=>e.type===`input`),n={};return t.forEach(t=>{e&&typeof e==`object`&&t.id in e&&(n[t.id]=s.parse(e[t.id]))}),n},S=e=>{let t=p.filter(e=>e.type===`output`),n={};return t.forEach(t=>{let r=e.get(t.id);r!==void 0&&(n[t.id]=r)}),n},C=(t,n,i,o,s,l,u)=>c.gen(function*(){let d=s.get(t);if(!d)return yield*e.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();a&&(yield*a({jobId:l,flowId:r,nodeId:t,eventType:f.NodeStart,nodeName:d.name,nodeType:d.type}));let p=d.retry?.maxRetries??0,m=d.retry?.retryDelay??1e3,h=d.retry?.exponentialBackoff??!0,g=0,_=null;for(;g<=p;)try{let s,c={};if(d.type===`input`){if(s=i[t],s===void 0)return yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Input node ${t} has no input data`)}).toEffect()}else{if(c=b(t,o),Object.keys(c).length===0)return yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${t} has no input data`)}).toEffect();if(d.multiInput)s=c;else{let n=Object.keys(c)[0];if(!n)return yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${t} has no input data`)}).toEffect();s=c[n]}}if(d.type===`conditional`&&!(yield*y(d,s)))return a&&(yield*a({jobId:l,flowId:r,nodeId:t,eventType:f.NodeEnd,nodeName:d.name})),{nodeId:t,result:s,success:!0,waiting:!1};let p=yield*d.run({data:s,inputs:c,jobId:l,flowId:r,storageId:n,clientId:u});if(p.type===`waiting`){let e=p.partialData;return a&&(yield*a({jobId:l,flowId:r,nodeId:t,eventType:f.NodePause,nodeName:d.name,partialData:e})),{nodeId:t,result:e,success:!0,waiting:!0}}let m=p.data;return a&&(yield*a({jobId:l,flowId:r,nodeId:t,eventType:f.NodeEnd,nodeName:d.name,result:m})),{nodeId:t,result:m,success:!0,waiting:!1}}catch(n){if(_=n instanceof e?n:e.fromCode(`FLOW_NODE_ERROR`,{cause:n}),g<p){g++;let e=h?m*2**(g-1):m;yield*c.logWarning(`Node ${t} (${d.name}) failed, retrying (${g}/${p}) after ${e}ms`),yield*c.sleep(e);continue}return 
a&&(yield*a({jobId:l,flowId:r,nodeId:t,eventType:f.NodeError,nodeName:d.name,error:_.body,retryCount:g})),yield*_.toEffect()}return _?yield*_.toEffect():yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Unexpected error in retry loop`)}).toEffect()}),w=({inputs:n,storageId:i,jobId:s,resumeFrom:d,clientId:m})=>c.gen(function*(){!d&&a&&(yield*a({jobId:s,eventType:f.FlowStart,flowId:r}));let h=x(n||{}),v,y,b;if(d)v=d.executionOrder,y=d.nodeResults,b=d.currentIndex;else if(v=_(),y=new Map,b=0,v.length!==p.length)return yield*e.fromCode(`FLOW_CYCLE_ERROR`).toEffect();let w=new Map(p.map(e=>[e.id,e]));if(t.parallelExecution?.enabled??!1){yield*c.logDebug(`Flow ${r}: Executing in parallel mode (maxConcurrency: ${t.parallelExecution?.maxConcurrency??4})`);let e=new g({maxConcurrency:t.parallelExecution?.maxConcurrency??4}),n=e.groupNodesByExecutionLevel(p,o);yield*c.logDebug(`Flow ${r}: Grouped nodes into ${n.length} execution levels`);let l={};p.forEach(e=>{l[e.id]=[]}),o.forEach(e=>{l[e.target]?.push(e.source)});for(let t of n){yield*c.logDebug(`Flow ${r}: Executing level ${t.level} with nodes: ${t.nodes.join(`, `)}`);let n=t.nodes.map(e=>()=>c.gen(function*(){if(d&&e===d.executionOrder[b]&&a){let t=w.get(e);t&&(yield*a({jobId:s,flowId:r,nodeId:e,eventType:f.NodeResume,nodeName:t.name,nodeType:t.type}))}return{nodeId:e,nodeResult:yield*C(e,i,h,y,w,s,m)}})),o=yield*e.executeNodesInParallel(n);for(let{nodeId:e,nodeResult:t}of o){if(t.waiting)return t.result!==void 0&&y.set(e,t.result),{type:`paused`,nodeId:e,executionState:{executionOrder:v,currentIndex:v.indexOf(e),inputs:h}};t.success&&y.set(e,t.result)}}}else{yield*c.logDebug(`Flow ${r}: Executing in sequential mode`);for(let t=b;t<v.length;t++){let n=v[t];if(!n)return yield*e.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();if(d&&t===b&&a){let e=w.get(n);e&&(yield*a({jobId:s,flowId:r,nodeId:n,eventType:f.NodeResume,nodeName:e.name,nodeType:e.type}))}let o=yield*C(n,i,h,y,w,s,m);if(o.waiting)return o.result!==void 0&&y.set(o.nodeId,o.result),{type:`paused`,nodeId:o.nodeId,executionState:{executionOrder:v,currentIndex:t,inputs:h}};o.success&&y.set(o.nodeId,o.result)}}let T=S(y),E=u.record(u.string(),l).safeParse(T);if(!E.success){let t=`Flow output validation failed: ${E.error.message}. Expected outputs: ${JSON.stringify(Object.keys(S(y)))}. 
Output nodes: ${p.filter(e=>e.type===`output`).map(e=>e.id).join(`, `)}`;return a&&(yield*a({jobId:s,eventType:f.FlowError,flowId:r,error:t})),yield*e.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:t,cause:E.error}).toEffect()}let D=E.data;return a&&(yield*a({jobId:s,eventType:f.FlowEnd,flowId:r,result:D})),{type:`completed`,result:D}});return{id:r,name:i,nodes:p,edges:o,inputSchema:s,outputSchema:l,onEvent:a,run:({inputs:e,storageId:t,jobId:n,clientId:r})=>w({inputs:e,storageId:t,jobId:n,clientId:r}),resume:({jobId:e,storageId:t,nodeResults:n,executionState:r,clientId:i})=>w({inputs:r.inputs,storageId:t,jobId:e,resumeFrom:{executionOrder:r.executionOrder,nodeResults:new Map(Object.entries(n)),currentIndex:r.currentIndex},clientId:i}),validateTypes:()=>{let e=p;return m.validateFlow(e,o)},validateInputs:e=>m.validateData(e,s),validateOutputs:e=>m.validateData(e,l)}})}var x=class extends s.Tag(`FlowProvider`)(){},S=class extends s.Tag(`FlowServer`)(){};const C=e=>typeof e==`object`&&!!e&&`id`in e;function w(e,t,n){let r=r=>{let i=e=>c.gen(function*(){let t=yield*n.get(r);t&&(yield*n.set(r,{...t,...e,updatedAt:new Date}))});return a=>c.gen(function*(){switch(e.onEvent&&(yield*c.catchAll(e.onEvent(a),e=>(c.logError(`Original onEvent failed`,e),c.succeed({eventId:null})))),yield*t.emit(r,a),c.logInfo(`Updating job ${r} with event ${a.eventType}`),a.eventType){case f.FlowStart:yield*i({status:`running`});break;case f.FlowEnd:break;case f.FlowError:yield*i({status:`failed`,error:a.error});break;case f.NodeStart:yield*c.gen(function*(){let e=yield*n.get(r);if(e){let t=e.tasks.find(e=>e.nodeId===a.nodeId)?e.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`running`,updatedAt:new Date}:e):[...e.tasks,{nodeId:a.nodeId,status:`running`,createdAt:new Date,updatedAt:new Date}];yield*n.set(r,{...e,tasks:t,updatedAt:new Date})}});break;case f.NodePause:yield*c.gen(function*(){let e=yield*n.get(r);if(e){let t=e.tasks.find(e=>e.nodeId===a.nodeId)?e.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`paused`,result:a.partialData,updatedAt:new Date}:e):[...e.tasks,{nodeId:a.nodeId,status:`paused`,result:a.partialData,createdAt:new Date,updatedAt:new Date}];yield*n.set(r,{...e,tasks:t,updatedAt:new Date})}});break;case f.NodeResume:yield*c.gen(function*(){let e=yield*n.get(r);if(e){let t=e.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`running`,updatedAt:new Date}:e);yield*n.set(r,{...e,tasks:t,updatedAt:new Date})}});break;case f.NodeEnd:yield*c.gen(function*(){let t=yield*n.get(r);if(t){let i=t.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`completed`,result:a.result,updatedAt:new Date}:e),o=e.nodes.find(e=>e.id===a.nodeId)?.type===`output`,s=a.result,c=t.intermediateFiles||[];o&&C(s)&&s.id?c=c.filter(e=>e!==s.id):!o&&C(s)&&s.id&&(c.includes(s.id)||c.push(s.id)),yield*n.set(r,{...t,tasks:i,intermediateFiles:c,updatedAt:new Date})}});break;case f.NodeError:yield*c.gen(function*(){let e=yield*n.get(r);if(e){let t=e.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`failed`,error:a.error,retryCount:a.retryCount,updatedAt:new Date}:e);yield*n.set(r,{...e,tasks:t,error:a.error,updatedAt:new Date})}});break}return{eventId:r}})};return{...e,run:t=>c.gen(function*(){let n=t.jobId||crypto.randomUUID(),i=r(n);return yield*(yield*b({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:i})).run({...t,jobId:n,clientId:t.clientId})}),resume:t=>c.gen(function*(){let n=t.jobId,i=r(n);return 
yield*(yield*b({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:i})).resume(t)})}}function T(){return c.gen(function*(){let r=yield*x,i=yield*n,o=yield*t,s=yield*a,l=(t,n)=>c.gen(function*(){let r=yield*o.get(t);return r?yield*o.set(t,{...r,...n}):yield*c.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}))}),u=(e,t)=>c.gen(function*(){let n=yield*o.get(e);!n||!n.intermediateFiles||n.intermediateFiles.length===0||(yield*c.logInfo(`Cleaning up ${n.intermediateFiles.length} intermediate files for job ${e}`),yield*c.all(n.intermediateFiles.map(e=>c.gen(function*(){yield*s.delete(e,t),yield*c.logDebug(`Deleted intermediate file ${e}`)}).pipe(c.catchAll(t=>c.gen(function*(){return yield*c.logWarning(`Failed to delete intermediate file ${e}: ${t}`),c.succeed(void 0)})))),{concurrency:5}),yield*l(e,{intermediateFiles:[]}))}),d=({jobId:t,flow:n,storageId:r,clientId:a,inputs:s})=>c.gen(function*(){yield*l(t,{status:`running`});let e=yield*w(n,i,o).run({inputs:s,storageId:r,jobId:t,clientId:a});return e.type===`paused`?yield*l(t,{status:`paused`,pausedAt:e.nodeId,executionState:e.executionState,updatedAt:new Date}):(yield*l(t,{status:`completed`,result:e.result,updatedAt:new Date,endedAt:new Date}),yield*u(t,a)),e}).pipe(c.catchAll(n=>c.gen(function*(){yield*c.logError(`Flow execution failed`,n);let r=n instanceof e?n.body:String(n);yield*c.logInfo(`Updating job ${t} to failed status with error: ${r}`),yield*l(t,{status:`failed`,error:r,updatedAt:new Date}).pipe(c.catchAll(e=>c.gen(function*(){return yield*c.logError(`Failed to update job ${t}`,e),c.succeed(void 0)})));let s=yield*o.get(t);return s&&(yield*i.emit(t,{jobId:t,eventType:f.FlowError,flowId:s.flowId,error:r}).pipe(c.catchAll(e=>c.gen(function*(){return yield*c.logError(`Failed to emit FlowError event for job ${t}`,e),c.succeed(void 0)})))),yield*u(t,a).pipe(c.catchAll(e=>c.gen(function*(){return yield*c.logWarning(`Failed to cleanup intermediate files for job ${t}`,e),c.succeed(void 0)}))),c.fail(n)})));return{getFlow:(e,t)=>c.gen(function*(){return yield*r.getFlow(e,t)}),getFlowData:(e,t)=>c.gen(function*(){return y(yield*r.getFlow(e,t))}),runFlow:({flowId:t,storageId:n,clientId:i,inputs:a})=>c.gen(function*(){let s=yield*c.try({try:()=>X.parse({inputs:a}),catch:t=>e.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:t})}),l=crypto.randomUUID(),u=new Date,f={id:l,flowId:t,storageId:n,clientId:i,status:`started`,createdAt:u,updatedAt:u,tasks:[]};yield*o.set(l,f);let p=yield*r.getFlow(t,i);return yield*c.forkDaemon(d({jobId:l,flow:p,storageId:n,clientId:i,inputs:s.inputs}).pipe(c.tapErrorCause(e=>c.logError(`Flow execution failed`,e)))),f}),getJobStatus:t=>c.gen(function*(){return(yield*o.get(t))||(yield*c.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`})))}),continueFlow:({jobId:t,nodeId:n,newData:a,clientId:s})=>c.gen(function*(){console.log(`continueFlow`,t,n,a);let d=yield*o.get(t);if(!d)return console.error(`Job not found`),yield*c.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}));if(d.status!==`paused`)return console.error(`Job is not paused`),yield*c.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} is not paused (status: ${d.status})`}));if(d.pausedAt!==n)return console.error(`Job is not paused at the expected node`),yield*c.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} is paused at node ${d.pausedAt}, not ${n}`}));if(!d.executionState)return console.error(`Job has no execution 
state`),yield*c.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} has no execution state`}));let p={...d.tasks.reduce((e,t)=>(t.result!==void 0&&(e[t.nodeId]=t.result),e),{}),[n]:a},m={...d.executionState.inputs,[n]:a};yield*l(t,{status:`running`});let h=yield*r.getFlow(d.flowId,d.clientId),g=c.gen(function*(){let n=w(h,i,o);if(!d.executionState)return yield*c.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} has no execution state`}));let r=yield*n.resume({jobId:t,storageId:d.storageId,nodeResults:p,executionState:{...d.executionState,inputs:m},clientId:d.clientId});return r.type===`paused`?yield*l(t,{status:`paused`,pausedAt:r.nodeId,executionState:r.executionState,updatedAt:new Date}):(yield*l(t,{status:`completed`,pausedAt:void 0,executionState:void 0,result:r.result,updatedAt:new Date,endedAt:new Date}),yield*u(t,s)),r}).pipe(c.catchAll(n=>c.gen(function*(){yield*c.logError(`Flow resume failed`,n);let r=n instanceof e?n.body:String(n);yield*c.logInfo(`Updating job ${t} to failed status with error: ${r}`),yield*l(t,{status:`failed`,error:r,updatedAt:new Date}).pipe(c.catchAll(e=>c.gen(function*(){return yield*c.logError(`Failed to update job ${t}`,e),c.succeed(void 0)})));let a=yield*o.get(t);return a&&(yield*i.emit(t,{jobId:t,eventType:f.FlowError,flowId:a.flowId,error:r}).pipe(c.catchAll(e=>c.gen(function*(){return yield*c.logError(`Failed to emit FlowError event for job ${t}`,e),c.succeed(void 0)})))),yield*u(t,s).pipe(c.catchAll(e=>c.gen(function*(){return yield*c.logWarning(`Failed to cleanup intermediate files for job ${t}`,e),c.succeed(void 0)}))),c.fail(n)})));return yield*c.forkDaemon(g.pipe(c.tapErrorCause(e=>c.logError(`Flow resume failed`,e)))),(yield*o.get(t))||(yield*c.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found after update`})))}),subscribeToFlowEvents:(e,t)=>c.gen(function*(){yield*i.subscribe(e,t)}),unsubscribeFromFlowEvents:e=>c.gen(function*(){yield*i.unsubscribe(e)})}})}const E=l.effect(S,T()),D=e=>({type:`complete`,data:e}),O=e=>({type:`waiting`,partialData:e});function k(e){if(!e)return{type:``,fileName:``,metadata:void 0,metadataJson:void 0};let t={...e},n=String(t.type||t.mimeType||t[`content-type`]||``);n&&(t.type||=n,t.mimeType||=n);let r=String(t.fileName||t.originalName||t.name||``);return r&&(t.fileName||=r,t.originalName||=r,t.name||=r),{type:n,fileName:r,metadata:t,metadataJson:JSON.stringify(t)}}const A=u.object({operation:u.literal(`init`),storageId:u.string(),metadata:u.record(u.string(),u.any()).optional()}),j=u.object({operation:u.literal(`finalize`),uploadId:u.string()}),M=u.object({operation:u.literal(`url`),url:u.string(),storageId:u.string().optional(),metadata:u.record(u.string(),u.any()).optional()}),N=u.union([A,j,M]),P=u.object({allowedMimeTypes:u.array(u.string()).optional(),minSize:u.number().positive().optional(),maxSize:u.number().positive().optional()});function F(t,n){return c.gen(function*(){if(n){if(n.allowedMimeTypes&&n.allowedMimeTypes.length>0&&!n.allowedMimeTypes.some(e=>{if(e.endsWith(`/*`)){let n=e.slice(0,-2);return t.type.startsWith(n)}return t.type===e}))throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`File type "${t.type}" is not allowed. 
Allowed types: ${n.allowedMimeTypes.join(`, `)}`)}).toEffect();if(n.minSize!==void 0&&t.size<n.minSize)throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${t.size} bytes) is below minimum (${n.minSize} bytes)`)}).toEffect();if(n.maxSize!==void 0&&t.size>n.maxSize)throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${t.size} bytes) exceeds maximum (${n.maxSize} bytes)`)}).toEffect()}})}function I(t,n){return c.gen(function*(){let s=yield*a;return yield*m({id:t,name:`Input`,description:`Handles file input through multiple methods - streaming upload (init/finalize) or direct URL fetch`,type:p.input,inputSchema:N,outputSchema:r,run:({data:r,flowId:a,jobId:l,clientId:u})=>c.gen(function*(){switch(r.operation){case`init`:{let e={storageId:r.storageId,size:r.metadata?.size||0,type:r.metadata?.mimeType||`application/octet-stream`,fileName:r.metadata?.originalName,lastModified:r.metadata?.size?Date.now():void 0,metadata:r.metadata?JSON.stringify(r.metadata):void 0,flow:{flowId:a,nodeId:t,jobId:l}};return O(yield*s.createUpload(e,u))}case`finalize`:{let e=yield*s.getUpload(r.uploadId),{type:t}=k(e.metadata);return yield*F({type:t,size:e.size||0},n),D(e)}case`url`:{let e=yield*o(r.url),c=yield*i(e),d=r.metadata?.mimeType||e.headers.get(`content-type`)||`application/octet-stream`,f=r.metadata?.size||Number(e.headers.get(`content-length`)||0),p=r.metadata?.originalName||r.url.split(`/`).pop()||`file`;yield*F({type:d,size:f},n);let m=new ReadableStream({start(e){e.enqueue(new Uint8Array(c)),e.close()}}),h={storageId:r.storageId||`buffer`,size:f,type:d,fileName:p,lastModified:Date.now(),metadata:r.metadata?JSON.stringify(r.metadata):void 0};return D({...yield*s.upload(h,u,m),flow:{flowId:a,nodeId:t,jobId:l}})}default:throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`Invalid operation`)}).toEffect()}})})})}const L=u.object({});function R(t,n=e=>c.succeed(e)){return c.gen(function*(){let i=yield*a;return yield*m({id:t,name:`Storage`,description:`Stores a file in the storage`,type:p.output,inputSchema:r,outputSchema:r,run:({data:r,storageId:a,flowId:o,jobId:s,clientId:l})=>c.gen(function*(){let{type:u,fileName:d,metadata:f,metadataJson:p}=k(r.metadata),m={flowId:o,nodeId:t,jobId:s},h=f?{...r,metadata:f}:r,g=yield*i.getUpload(r.id);if(!g.id)return yield*c.fail(e.fromCode(`FILE_READ_ERROR`,Error(`Upload Key is undefined`)));if(g.storage.id===a)return D(yield*n({...h,flow:m}));let _=yield*i.read(r.id,l),v=new ReadableStream({start(e){e.enqueue(_),e.close()}}),y=yield*i.upload({storageId:a,size:_.byteLength,type:u,fileName:d,lastModified:0,metadata:p,flow:m},l,v),b=k(y.metadata);return D(yield*n(b.metadata?{...y,metadata:b.metadata}:y))})})})}function z({id:e,name:t,description:n,transform:i}){return c.gen(function*(){let o=yield*a;return yield*m({id:e,name:t,description:n,type:p.process,inputSchema:r,outputSchema:r,run:({data:t,storageId:n,flowId:r,jobId:a,clientId:s})=>c.gen(function*(){let c={flowId:r,nodeId:e,jobId:a},l=yield*i(yield*o.read(t.id,s),t),u=l instanceof Uint8Array?l:l.bytes,d=l instanceof Uint8Array?void 0:l.type,f=l instanceof Uint8Array?void 0:l.fileName,p=new ReadableStream({start(e){e.enqueue(u),e.close()}}),{type:m,fileName:h,metadata:g,metadataJson:_}=k(t.metadata),v=yield*o.upload({storageId:n,size:u.byteLength,type:d??m,fileName:f??h,lastModified:0,metadata:_,flow:c},s,p);return D(g?{...v,metadata:g}:v)})})})}var B=class extends s.Tag(`CredentialProvider`)(){},V=class extends s.Tag(`ImageAiPlugin`)(){},H=class extends s.Tag(`ImagePlugin`)(){};const 
U=u.object({serviceType:u.enum([`replicate`]).optional()}),W=u.object({quality:u.number().min(0).max(100),format:u.enum([`jpeg`,`webp`,`png`,`avif`])}),G=u.object({serviceType:u.enum([`replicate`]).optional()}),K=u.object({width:u.number().positive().optional(),height:u.number().positive().optional(),fit:u.enum([`contain`,`cover`,`fill`])}).refine(e=>e.width||e.height,`Either width or height must be specified for resize`);var q=class extends s.Tag(`ZipPlugin`)(){};const J=(e,t)=>{if(e.length===0)return t;let[n,...r]=e;return r.reduce((e,t)=>u.union([e,t]),n)};function Y(t){return c.gen(function*(){let n=Object.entries(t.nodes),r=e=>c.isEffect(e)?e:c.succeed(e),i=yield*c.forEach(n,([t,n])=>c.flatMap(r(n),n=>n.id===t?c.succeed([t,n]):c.fail(e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node key ${t} does not match node id ${n.id}`)})))),a=Object.fromEntries(i),o=i.map(([,e])=>e),s=i.filter(([,e])=>e.type===p.input).map(([,e])=>e.inputSchema),l=i.filter(([,e])=>e.type===p.output).map(([,e])=>e.outputSchema),d=t.inputSchema??J(s,u.unknown()),f=t.outputSchema??J(l,u.unknown()),m=t.edges.map(e=>({source:a[e.source]?.id??e.source,target:a[e.target]?.id??e.target,sourcePort:e.sourcePort,targetPort:e.targetPort}));return yield*b({flowId:t.flowId,name:t.name,nodes:o,edges:m,inputSchema:d,outputSchema:f,typeChecker:t.typeChecker,onEvent:t.onEvent,parallelExecution:t.parallelExecution})})}const X=u.object({inputs:u.record(u.string(),u.any())});export{f as A,E as C,p as D,g as E,m as O,T as S,y as T,k as _,G as a,x as b,H as c,z as d,R as f,P as g,N as h,K as i,d as j,h as k,V as l,I as m,Y as n,W as o,L as p,q as r,U as s,X as t,B as u,D as v,b as w,S as x,O as y};
- //# sourceMappingURL=flow-B0mMJM5Y.js.map