@uploadista/core 0.0.7 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/checksum-DgutVB-2.cjs +1 -0
- package/dist/checksum-Do_Vh-tB.mjs +2 -0
- package/dist/checksum-Do_Vh-tB.mjs.map +1 -0
- package/dist/errors/index.cjs +1 -1
- package/dist/errors/{index.d.ts → index.d.mts} +2 -2
- package/dist/errors/index.mjs +1 -0
- package/dist/flow/index.cjs +1 -1
- package/dist/flow/index.d.cts +3 -3
- package/dist/flow/index.d.mts +6 -0
- package/dist/flow/index.mjs +1 -0
- package/dist/flow-C_doYlGf.cjs +1 -0
- package/dist/flow-DEohelFR.mjs +2 -0
- package/dist/flow-DEohelFR.mjs.map +1 -0
- package/dist/{index-Dv14pVwd.d.ts → index-C2nrn_49.d.mts} +5 -5
- package/dist/{index-BswVyg4Z.d.cts.map → index-C2nrn_49.d.mts.map} +1 -1
- package/dist/{index-BQIgMrBX.d.ts → index-CsLVxsad.d.mts} +2 -2
- package/dist/index-CsLVxsad.d.mts.map +1 -0
- package/dist/{index-BOKqNaD_.d.ts → index-b891YUgl.d.cts} +97 -32
- package/dist/index-b891YUgl.d.cts.map +1 -0
- package/dist/{index-BswVyg4Z.d.cts → index-eLYBkDBH.d.cts} +4 -4
- package/dist/index-eLYBkDBH.d.cts.map +1 -0
- package/dist/{index-aQrRecmb.d.cts → index-od64jviT.d.mts} +188 -303
- package/dist/index-od64jviT.d.mts.map +1 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +3 -3
- package/dist/index.d.mts +6 -0
- package/dist/index.mjs +1 -0
- package/dist/stream-limiter-Bj_iImMm.cjs +1 -0
- package/dist/stream-limiter-CEpDLQVB.mjs +2 -0
- package/dist/stream-limiter-CEpDLQVB.mjs.map +1 -0
- package/dist/streams/index.cjs +1 -1
- package/dist/streams/index.d.mts +3 -0
- package/dist/streams/index.mjs +1 -0
- package/dist/types/index.cjs +1 -1
- package/dist/types/index.d.cts +2 -2
- package/dist/types/index.d.mts +6 -0
- package/dist/types/index.mjs +1 -0
- package/dist/types-BYfvxhhG.mjs +2 -0
- package/dist/types-BYfvxhhG.mjs.map +1 -0
- package/dist/types-C80hlY_o.cjs +1 -0
- package/dist/upload/index.cjs +1 -1
- package/dist/upload/index.d.cts +2 -2
- package/dist/upload/index.d.mts +6 -0
- package/dist/upload/index.mjs +1 -0
- package/dist/upload-CcmxSO5u.mjs +2 -0
- package/dist/upload-CcmxSO5u.mjs.map +1 -0
- package/dist/upload-IZuHoQqL.cjs +1 -0
- package/dist/{uploadista-error-BOHJtDRc.cjs → uploadista-error-CQ1ADme7.cjs} +2 -2
- package/dist/{uploadista-error-kZCQLC_U.d.ts → uploadista-error-DHajuvP1.d.mts} +1 -1
- package/dist/uploadista-error-DHajuvP1.d.mts.map +1 -0
- package/dist/{uploadista-error-CDkJ_Vrc.js → uploadista-error-U9YxwNtM.mjs} +1 -1
- package/dist/uploadista-error-U9YxwNtM.mjs.map +1 -0
- package/dist/utils/index.cjs +1 -1
- package/dist/utils/index.d.cts +1 -1
- package/dist/utils/{index.d.ts → index.d.mts} +2 -2
- package/dist/utils/index.mjs +1 -0
- package/dist/{utils-BbLQplqQ.cjs → utils-C6k-Fs99.mjs} +2 -1
- package/dist/utils-C6k-Fs99.mjs.map +1 -0
- package/dist/utils-DzDTNMnq.cjs +1 -0
- package/package.json +6 -6
- package/src/flow/flow-server.ts +122 -25
- package/src/flow/flow.ts +2 -0
- package/src/types/upload-file.ts +11 -0
- package/dist/checksum-CPiON71t.cjs +0 -1
- package/dist/checksum-_Vagjoys.js +0 -2
- package/dist/checksum-_Vagjoys.js.map +0 -1
- package/dist/errors/index.js +0 -1
- package/dist/flow/index.d.ts +0 -6
- package/dist/flow/index.js +0 -1
- package/dist/flow-CSHZVjcf.js +0 -2
- package/dist/flow-CSHZVjcf.js.map +0 -1
- package/dist/flow-zlCaikPS.cjs +0 -1
- package/dist/index-BOKqNaD_.d.ts.map +0 -1
- package/dist/index-BQIgMrBX.d.ts.map +0 -1
- package/dist/index-Dv14pVwd.d.ts.map +0 -1
- package/dist/index-aQrRecmb.d.cts.map +0 -1
- package/dist/index.d.ts +0 -6
- package/dist/index.js +0 -1
- package/dist/stream-limiter-CTLrikR_.js +0 -2
- package/dist/stream-limiter-CTLrikR_.js.map +0 -1
- package/dist/stream-limiter-CaCFrKY1.cjs +0 -1
- package/dist/streams/index.d.ts +0 -3
- package/dist/streams/index.js +0 -1
- package/dist/types/index.d.ts +0 -6
- package/dist/types/index.js +0 -1
- package/dist/types-BVbqP7yA.cjs +0 -1
- package/dist/types-DqllXpuL.js +0 -2
- package/dist/types-DqllXpuL.js.map +0 -1
- package/dist/upload/index.d.ts +0 -6
- package/dist/upload/index.js +0 -1
- package/dist/upload-C_n7Smfl.js +0 -2
- package/dist/upload-C_n7Smfl.js.map +0 -1
- package/dist/upload-kFnf82ds.cjs +0 -1
- package/dist/uploadista-error-CDkJ_Vrc.js.map +0 -1
- package/dist/uploadista-error-kZCQLC_U.d.ts.map +0 -1
- package/dist/utils/index.js +0 -1
- package/dist/utils-B5sYo1z9.js +0 -2
- package/dist/utils-B5sYo1z9.js.map +0 -1
- /package/dist/{errors-8i_aMxOE.js → errors-C0zLx77t.mjs} +0 -0
- /package/dist/{errors-D-K-vxsP.cjs → errors-CRm1FHHT.cjs} +0 -0
- /package/dist/{index-BoGG5KAY.d.ts → index-DEHBdV_z.d.mts} +0 -0
- /package/dist/{streams-Bs3GDNKJ.js → streams-CJKKIAwy.mjs} +0 -0
package/src/flow/flow-server.ts
CHANGED

@@ -1,4 +1,4 @@
-import { Context, Effect, Layer } from "effect";
+import { Context, Effect, Layer, Option, Runtime } from "effect";
 import type { z } from "zod";
 import { UploadistaError } from "../errors";
 import {
@@ -6,6 +6,7 @@ import {
   EventType,
   type Flow,
   type FlowData,
+  type FlowExecutionResult,
   getFlowData,
   runArgsSchema,
 } from "../flow";
@@ -15,6 +16,40 @@ import type {
   UploadFile,
   WebSocketConnection,
 } from "../types";
+
+/**
+ * WaitUntil callback type for keeping background tasks alive.
+ * Used in serverless environments like Cloudflare Workers to prevent
+ * premature termination of background operations.
+ *
+ * @param promise - Promise representing the background task to keep alive
+ */
+export type WaitUntilCallback = (promise: Promise<unknown>) => void;
+
+/**
+ * Optional WaitUntil service for background task management.
+ * When provided, allows flows to execute beyond the HTTP response lifecycle.
+ *
+ * In Cloudflare Workers, use `ctx.executionCtx.waitUntil()`.
+ * In other environments, this can be undefined (flows execute normally with Effect.fork).
+ *
+ * This service uses Effect's optional service pattern. Access it via:
+ * ```typescript
+ * const waitUntil = yield* FlowWaitUntil.optional;
+ * if (Option.isSome(waitUntil)) {
+ *   // Use waitUntil.value
+ * }
+ * ```
+ *
+ * @see https://effect.website/docs/requirements-management/services/#optional-services
+ */
+export class FlowWaitUntil extends Context.Tag("FlowWaitUntil")<
+  FlowWaitUntil,
+  WaitUntilCallback
+>() {
+  static optional = Effect.serviceOption(FlowWaitUntil);
+}
+
 import { FlowEventEmitter, FlowJobKVStore } from "../types";
 import { UploadServer } from "../upload";
 import type { FlowEvent } from "./event";
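The `FlowWaitUntil` tag above is the opt-in hook for serverless hosts. A rough sketch of the consumer side, wiring it into a Cloudflare Worker — the import path, `handleFlowRequest`, and the Worker shape are hypothetical stand-ins; only `FlowWaitUntil`, `Layer.succeed`, and `Effect.provide` are grounded in the code above:

import { Effect, Layer } from "effect";
// Assumed export path; the tag is defined in src/flow/flow-server.ts above.
import { FlowWaitUntil } from "@uploadista/core/flow";

// Hypothetical Effect-returning request handler, standing in for your server logic.
declare const handleFlowRequest: (req: Request) => Effect.Effect<Response>;

export default {
  // ExecutionContext comes from @cloudflare/workers-types.
  async fetch(request: Request, _env: unknown, ctx: ExecutionContext) {
    // Hand the Worker's waitUntil to the flow server so background flow
    // execution can outlive the HTTP response.
    const waitUntilLayer = Layer.succeed(FlowWaitUntil, (promise) =>
      ctx.waitUntil(promise),
    );
    return Effect.runPromise(
      handleFlowRequest(request).pipe(Effect.provide(waitUntilLayer)),
    );
  },
};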
@@ -555,7 +590,8 @@ function withFlowEvents<
     const executionJobId = args.jobId || crypto.randomUUID();

     const onEventCallback = createOnEventCallback(executionJobId);
-    const checkJobStatusCallback = createCheckJobStatusCallback(executionJobId);
+    const checkJobStatusCallback =
+      createCheckJobStatusCallback(executionJobId);

     // Create a new flow with the same configuration but with onEvent callback
     const flowWithEvents = yield* createFlowWithSchema({
@@ -595,7 +631,8 @@ function withFlowEvents<
     const executionJobId = args.jobId;

     const onEventCallback = createOnEventCallback(executionJobId);
-    const checkJobStatusCallback = createCheckJobStatusCallback(executionJobId);
+    const checkJobStatusCallback =
+      createCheckJobStatusCallback(executionJobId);

     // Create a new flow with the same configuration but with onEvent callback
     const flowWithEvents = yield* createFlowWithSchema({
@@ -697,13 +734,19 @@ export function createFlowServer() {
       inputs: Record<string, any>;
     }) =>
       Effect.gen(function* () {
+        console.log(
+          `[FlowServer] executeFlowInBackground started for job: ${jobId}`,
+        );
+
         // Update job status to running
         yield* updateJob(jobId, {
           status: "running",
         });

+        console.log(`[FlowServer] Creating flowWithEvents for job: ${jobId}`);
         const flowWithEvents = withFlowEvents(flow, eventEmitter, kvStore);

+        console.log(`[FlowServer] Running flow for job: ${jobId}`);
         // Run the flow with the consistent jobId
         const result = yield* flowWithEvents.run({
           inputs,
@@ -712,6 +755,10 @@ export function createFlowServer() {
           clientId,
         });

+        console.log(
+          `[FlowServer] Flow completed for job: ${jobId}, result type: ${result.type}`,
+        );
+
         // Handle result based on type
         if (result.type === "paused") {
           // Update job as paused (node results are in tasks, not executionState)
@@ -801,7 +848,7 @@ export function createFlowServer() {
             ),
           );

-
+          throw error;
         }),
       ),
     );
@@ -831,6 +878,8 @@ export function createFlowServer() {
       inputs: unknown;
     }) =>
       Effect.gen(function* () {
+        const waitUntil = yield* FlowWaitUntil.optional;
+
         const parsedParams = yield* Effect.try({
           try: () => runArgsSchema.parse({ inputs }),
           catch: (error) =>
@@ -860,21 +909,47 @@ export function createFlowServer() {
        // Get the flow and start background execution
        const flow = yield* flowProvider.getFlow(flowId, clientId);

-
-        yield* Effect.forkDaemon(
-          executeFlowInBackground({
-            jobId,
-            flow,
-            storageId,
-            clientId,
-            inputs: parsedParams.inputs,
-          }).pipe(
-            Effect.tapErrorCause((cause) =>
-              Effect.logError("Flow execution failed", cause),
-            ),
-          ),
+        console.log(
+          `[FlowServer] About to fork flow execution for job: ${jobId}`,
        );

+        // Execute flow in background
+        // If waitUntil is provided (Cloudflare Workers), use it to keep execution alive
+        // Otherwise, use Effect.fork for standard environments
+        const flowEffect = executeFlowInBackground({
+          jobId,
+          flow,
+          storageId,
+          clientId,
+          inputs: parsedParams.inputs,
+        }).pipe(
+          Effect.tapErrorCause((cause) =>
+            Effect.logError("Flow execution failed", cause),
+          ),
+        ) as Effect.Effect<
+          FlowExecutionResult<Record<string, any>>,
+          UploadistaError,
+          never
+        >;
+
+        if (Option.isSome(waitUntil)) {
+          // Cloudflare Workers: Use waitUntil to keep execution alive
+          console.log(`[FlowServer] Using waitUntil for job: ${jobId}`);
+          // Get the current runtime to run the effect as a promise
+          const runtime = yield* Effect.runtime();
+          const runnable = Runtime.runPromise(runtime);
+          const promise = runnable(flowEffect);
+          waitUntil.value(promise);
+        } else {
+          // Standard environments: Fork normally
+          console.log(
+            `[FlowServer] Using Effect.forkDaemon for job: ${jobId}`,
+          );
+          yield* Effect.forkDaemon(flowEffect);
+        }
+
+        console.log(`[FlowServer] Flow execution started for job: ${jobId}`);
+
        // Return immediately with jobId
        return job;
      }),
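Distilled, the branch added above (and mirrored for resume below) resolves the optional service, then either hands a Promise to the platform or forks a daemon fiber. A self-contained sketch, with `task` standing in for the piped `executeFlowInBackground` / `resumeFlowInBackground` effects:

import { Effect, Option, Runtime } from "effect";
import { FlowWaitUntil } from "./flow-server"; // tag defined in the diff above

const startInBackground = <A, E>(task: Effect.Effect<A, E, never>) =>
  Effect.gen(function* () {
    const waitUntil = yield* FlowWaitUntil.optional;
    if (Option.isSome(waitUntil)) {
      // Serverless: materialize the effect as a Promise with the current
      // runtime and hand it to the platform so the isolate stays alive.
      const runtime = yield* Effect.runtime();
      waitUntil.value(Runtime.runPromise(runtime)(task));
    } else {
      // Long-lived processes: a daemon fiber survives the closing of the
      // request scope, matching the previous forkDaemon behavior.
      yield* Effect.forkDaemon(task);
    }
  });

Note that `Runtime.runPromise` rejects if `task` fails, which is why the diff pipes `Effect.tapErrorCause` into the effect before converting it.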
@@ -905,6 +980,8 @@ export function createFlowServer() {
       clientId: string | null;
     }) =>
       Effect.gen(function* () {
+        const waitUntil = yield* FlowWaitUntil.optional;
+
         // Get the current job
         const job = yield* kvStore.get(jobId);
         if (!job) {
@@ -1092,19 +1169,39 @@ export function createFlowServer() {
             ),
           );

-
+          throw error;
         }),
       ),
     );

-      // Fork the resume execution to run in background
-      yield* Effect.forkDaemon(
-        resumeFlowInBackground.pipe(
-          Effect.tapErrorCause((cause) =>
-            Effect.logError("Flow resume failed", cause),
+      // Fork the resume execution to run in background
+      // Use waitUntil if available (Cloudflare Workers), otherwise fork normally
+      const resumeEffect = resumeFlowInBackground.pipe(
+        Effect.tapErrorCause((cause) =>
+          Effect.logError("Flow resume failed", cause),
        ),
-      )
+      ) as Effect.Effect<
+        FlowExecutionResult<Record<string, any>>,
+        UploadistaError,
+        never
+      >;
+
+      if (Option.isSome(waitUntil)) {
+        // Cloudflare Workers: Use waitUntil to keep execution alive
+        console.log(
+          `[FlowServer] Using waitUntil for resume job: ${jobId}`,
+        );
+        const runtime = yield* Effect.runtime();
+        const runnable = Runtime.runPromise(runtime);
+        const promise = runnable(resumeEffect);
+        waitUntil.value(promise);
+      } else {
+        // Standard environments: Fork normally as daemon
+        console.log(
+          `[FlowServer] Using Effect.forkDaemon for resume job: ${jobId}`,
+        );
+        yield* Effect.forkDaemon(resumeEffect);
+      }

      // Return immediately with updated job
      const updatedJob = yield* kvStore.get(jobId);
package/src/flow/flow.ts
CHANGED

@@ -527,6 +527,7 @@ export function createFlowWithSchema<
           // For input nodes, use the mapped flow input
           nodeInput = nodeInputs[nodeId];
           if (nodeInput === undefined) {
+            yield* Effect.logError(`Input node ${nodeId} has no input data`);
             return yield* UploadistaError.fromCode("FLOW_NODE_ERROR", {
               cause: new Error(`Input node ${nodeId} has no input data`),
             }).toEffect();
@@ -536,6 +537,7 @@ export function createFlowWithSchema<
           nodeInputsForExecution = getNodeInputs(nodeId, nodeResults);

           if (Object.keys(nodeInputsForExecution).length === 0) {
+            yield* Effect.logError(`Node ${nodeId} has no input data`);
             return yield* UploadistaError.fromCode("FLOW_NODE_ERROR", {
               cause: new Error(`Node ${nodeId} has no input data`),
             }).toEffect();
package/src/types/upload-file.ts
CHANGED

@@ -24,6 +24,11 @@ export const uploadFileSchema = z.object({
     path: z.string().optional(),
     uploadId: z.string().optional(),
     bucket: z.string().optional(),
+    parts: z.array(z.object({
+      partNumber: z.number(),
+      etag: z.string(),
+      size: z.number(),
+    })).optional(),
   }),
   flow: z
     .object({
@@ -49,6 +54,7 @@ export const uploadFileSchema = z.object({
  * @property storage.path - Optional path prefix within the storage backend
  * @property storage.uploadId - Optional backend-specific upload ID (e.g., S3 multipart upload ID)
  * @property storage.bucket - Optional bucket or container name
+ * @property storage.parts - Optional array of uploaded parts (used by data stores that need to track parts locally, like R2)
  * @property flow - Optional flow processing information (when file is part of a flow)
  * @property flow.flowId - ID of the flow processing this file
  * @property flow.nodeId - ID of the flow node that created this file
@@ -123,6 +129,11 @@ export type UploadFile = {
     path?: string | undefined;
     uploadId?: string | undefined;
     bucket?: string | undefined;
+    parts?: Array<{
+      partNumber: number;
+      etag: string;
+      size: number;
+    }> | undefined;
   };
   flow?: {
     flowId: string;
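For illustration, a hypothetical snapshot of an in-progress multipart upload using the new `storage.parts` field — all values are made up, and the schema is re-stated locally so the snippet stands alone:

import { z } from "zod";

// Local restatement of the parts schema added above.
const partsSchema = z
  .array(
    z.object({ partNumber: z.number(), etag: z.string(), size: z.number() }),
  )
  .optional();

// Two 8 MiB parts already uploaded; partNumber/etag/size are what an S3-style
// CompleteMultipartUpload call needs, so a data store that tracks parts
// locally (the R2 case noted above) can finalize from this record.
const parts = partsSchema.parse([
  { partNumber: 1, etag: '"9b2cf535f27731c974343645a3985328"', size: 8_388_608 },
  { partNumber: 2, etag: '"0f343b0931126a20f133d67c2b018a3b"', size: 8_388_608 },
]);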
package/dist/checksum-CPiON71t.cjs
DELETED
(minified CommonJS checksum bundle; 1 line of generated build output omitted)

package/dist/checksum-_Vagjoys.js
DELETED
(minified ESM checksum bundle plus sourceMappingURL comment; 2 lines of generated build output omitted)

package/dist/checksum-_Vagjoys.js.map
DELETED
(source map for checksum-_Vagjoys.js; 1 line of generated build output omitted)

package/dist/errors/index.js
DELETED
(minified ESM re-export barrel for the errors module; 1 line of generated build output omitted)

package/dist/flow/index.d.ts
DELETED
(generated type declarations re-exporting the flow module; 6 lines of generated build output omitted)

package/dist/flow/index.js
DELETED
(minified ESM re-export barrel for the flow module; 1 line of generated build output omitted)

package/dist/flow-CSHZVjcf.js
DELETED
(minified ESM flow bundle plus sourceMappingURL comment; 2 lines of generated build output omitted)