@uploadista/core 0.0.18-beta.2 → 0.0.18-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/flow/index.cjs +1 -1
- package/dist/flow/index.d.cts +2 -2
- package/dist/flow/index.d.mts +2 -2
- package/dist/flow/index.mjs +1 -1
- package/dist/flow-NBmCjMqI.mjs +2 -0
- package/dist/flow-NBmCjMqI.mjs.map +1 -0
- package/dist/flow-yTdzjQ0k.cjs +1 -0
- package/dist/{index-DiHUjE9t.d.mts → index-BJxZyw0A.d.mts} +550 -195
- package/dist/index-BJxZyw0A.d.mts.map +1 -0
- package/dist/index-CoAMCnm6.d.cts.map +1 -1
- package/dist/{index-D5ALjvAb.d.cts → index-uOAh_6qk.d.cts} +550 -195
- package/dist/index-uOAh_6qk.d.cts.map +1 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +2 -2
- package/dist/index.d.mts +2 -2
- package/dist/index.mjs +1 -1
- package/dist/testing/index.cjs +1 -1
- package/dist/testing/index.d.cts +1 -1
- package/dist/testing/index.d.mts +1 -1
- package/dist/testing/index.mjs +1 -1
- package/dist/types/index.d.cts +1 -1
- package/dist/types/index.d.mts +1 -1
- package/dist/upload/index.d.cts +1 -1
- package/dist/upload/index.d.mts +1 -1
- package/package.json +5 -2
- package/src/flow/index.ts +2 -0
- package/src/flow/nodes/transform-node.ts +43 -2
- package/src/flow/types/flow-types.ts +148 -0
- package/src/flow/utils/file-naming.ts +308 -0
- package/dist/flow-CAywogte.mjs +0 -2
- package/dist/flow-CAywogte.mjs.map +0 -1
- package/dist/flow-D7QeEZVs.cjs +0 -1
- package/dist/index-D5ALjvAb.d.cts.map +0 -1
- package/dist/index-DiHUjE9t.d.mts.map +0 -1
|
@@ -551,6 +551,154 @@ export type FlowConfig<
|
|
|
551
551
|
};
|
|
552
552
|
};
|
|
553
553
|
|
|
554
|
+
// ============================================================================
// File Naming Types
// ============================================================================

/**
 * Context provided to file naming functions and templates.
 *
 * Contains all relevant information about the current file, node, and flow
 * execution that can be used to generate dynamic file names. Every field is
 * also exposed to mustache-style templates as {{fieldName}}.
 *
 * @property baseName - Filename without extension (e.g., "photo" from "photo.jpg")
 * @property extension - File extension without dot (e.g., "jpg")
 * @property fileName - Full original filename (e.g., "photo.jpg")
 * @property nodeType - Type of processing node (e.g., "resize", "optimize")
 * @property nodeId - Specific node instance ID
 * @property flowId - Flow identifier
 * @property jobId - Execution job ID
 * @property timestamp - ISO 8601 timestamp of processing
 * @property width - Output width (image/video nodes only)
 * @property height - Output height (image/video nodes only)
 * @property format - Output format (e.g., "webp", "mp4")
 * @property quality - Quality setting (e.g., 80)
 * @property pageNumber - Page number (document nodes only)
 *
 * @example
 * ```typescript
 * // Available in templates as {{variable}}
 * const pattern = "{{baseName}}-{{width}}x{{height}}.{{extension}}";
 * // Result: "photo-800x600.jpg"
 * ```
 */
export type NamingContext = {
  /** Filename without extension */
  baseName: string;
  /** File extension without dot */
  extension: string;
  /** Full original filename */
  fileName: string;
  /** Type of processing node */
  nodeType: string;
  /** Specific node instance ID */
  nodeId: string;
  /** Flow identifier */
  flowId: string;
  /** Execution job ID */
  jobId: string;
  /** ISO 8601 timestamp of processing */
  timestamp: string;
  /** Output width (image/video nodes) */
  width?: number;
  /** Output height (image/video nodes) */
  height?: number;
  /** Output format */
  format?: string;
  /** Quality setting */
  quality?: number;
  /** Page number (document nodes) */
  pageNumber?: number;
  /** Additional custom variables (index signature keeps the type open for node-specific extras) */
  [key: string]: string | number | undefined;
};
|
|
614
|
+
|
|
615
|
+
/**
 * Function type for custom file naming logic.
 *
 * Used with `FileNamingConfig` in `mode: 'custom'`; when both `rename` and
 * `pattern` are set, the rename function takes precedence.
 *
 * @param file - The UploadFile being processed
 * @param context - Naming context with all available variables
 * @returns The new filename (including extension)
 *
 * @example
 * ```typescript
 * const customRename: FileNamingFunction = (file, ctx) =>
 *   `${ctx.flowId}-${ctx.baseName}-${ctx.timestamp}.${ctx.extension}`;
 * ```
 */
export type FileNamingFunction = (
  file: UploadFile,
  context: NamingContext,
) => string;
|
|
632
|
+
|
|
633
|
+
/**
 * Function type for generating auto-naming suffixes.
 *
 * Each node type can define its own auto suffix generator that creates
 * a descriptive suffix based on the processing parameters. The suffix is
 * joined to the base name with a dash (no leading dash needed).
 *
 * @param context - Naming context with all available variables
 * @returns The suffix to append (without leading dash)
 *
 * @example
 * ```typescript
 * // Resize node auto suffix
 * const resizeAutoSuffix: AutoNamingSuffixGenerator = (ctx) =>
 *   `${ctx.width}x${ctx.height}`;
 * // Result: "photo-800x600.jpg"
 *
 * // Optimize node auto suffix
 * const optimizeAutoSuffix: AutoNamingSuffixGenerator = (ctx) =>
 *   ctx.format ?? 'optimized';
 * // Result: "photo-webp.webp"
 * ```
 */
export type AutoNamingSuffixGenerator = (context: NamingContext) => string;
|
|
656
|
+
|
|
657
|
+
/**
 * Configuration for file naming behavior on a node.
 *
 * Supports three modes:
 * - `undefined` or no config: Preserve original filename (backward compatible)
 * - `mode: 'auto'`: Generate smart suffix based on node type
 * - `mode: 'custom'`: Use template pattern or rename function
 *
 * In custom mode, `rename` takes precedence when both `rename` and `pattern`
 * are provided; if neither is set, the original filename is preserved.
 *
 * @property mode - Naming mode: 'auto' for smart suffixes, 'custom' for templates/functions
 * @property pattern - Mustache-style template string (for custom mode)
 * @property rename - Custom function for full control (for custom mode, SDK only)
 * @property autoSuffix - Generator function for auto mode suffix
 *
 * @example
 * ```typescript
 * // Auto mode with smart suffix
 * const autoNaming: FileNamingConfig = {
 *   mode: 'auto',
 *   autoSuffix: (ctx) => `${ctx.width}x${ctx.height}`
 * };
 *
 * // Custom mode with template
 * const templateNaming: FileNamingConfig = {
 *   mode: 'custom',
 *   pattern: '{{baseName}}-{{nodeType}}.{{extension}}'
 * };
 *
 * // Custom mode with function
 * const functionNaming: FileNamingConfig = {
 *   mode: 'custom',
 *   rename: (file, ctx) => `processed-${ctx.fileName}`
 * };
 * ```
 */
export type FileNamingConfig = {
  /** Naming mode: 'auto' for smart suffixes, 'custom' for templates/functions */
  mode: "auto" | "custom";
  /** Mustache-style template string (for custom mode) */
  pattern?: string;
  /** Custom function for full control (for custom mode, SDK only) */
  rename?: FileNamingFunction;
  /** Generator function for auto mode suffix */
  autoSuffix?: AutoNamingSuffixGenerator;
};
|
|
701
|
+
|
|
554
702
|
// Re-export existing types for compatibility
|
|
555
703
|
export { NodeType };
|
|
556
704
|
export type { FlowEvent, FlowEventFlowEnd, FlowEventFlowStart };
|
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File naming utilities for the flow engine.
|
|
3
|
+
*
|
|
4
|
+
* This module provides functions for generating dynamic file names based on
|
|
5
|
+
* templates, auto-suffixes, or custom functions. It supports mustache-style
|
|
6
|
+
* template interpolation using micromustache.
|
|
7
|
+
*
|
|
8
|
+
* @module flow/utils/file-naming
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { render } from "micromustache";
|
|
12
|
+
import type { UploadFile } from "../../types/upload-file";
|
|
13
|
+
import type {
|
|
14
|
+
FileNamingConfig,
|
|
15
|
+
NamingContext,
|
|
16
|
+
} from "../types/flow-types";
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* Extracts the base name (without extension) from a filename.
|
|
20
|
+
*
|
|
21
|
+
* @param fileName - The full filename
|
|
22
|
+
* @returns The filename without extension
|
|
23
|
+
*
|
|
24
|
+
* @example
|
|
25
|
+
* ```typescript
|
|
26
|
+
* getBaseName("photo.jpg") // "photo"
|
|
27
|
+
* getBaseName("document.tar.gz") // "document.tar"
|
|
28
|
+
* getBaseName("noextension") // "noextension"
|
|
29
|
+
* ```
|
|
30
|
+
*/
|
|
31
|
+
export function getBaseName(fileName: string): string {
|
|
32
|
+
const lastDotIndex = fileName.lastIndexOf(".");
|
|
33
|
+
if (lastDotIndex === -1 || lastDotIndex === 0) {
|
|
34
|
+
return fileName;
|
|
35
|
+
}
|
|
36
|
+
return fileName.substring(0, lastDotIndex);
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Extracts the extension (without dot) from a filename.
|
|
41
|
+
*
|
|
42
|
+
* @param fileName - The full filename
|
|
43
|
+
* @returns The extension without leading dot, or empty string if none
|
|
44
|
+
*
|
|
45
|
+
* @example
|
|
46
|
+
* ```typescript
|
|
47
|
+
* getExtension("photo.jpg") // "jpg"
|
|
48
|
+
* getExtension("document.tar.gz") // "gz"
|
|
49
|
+
* getExtension("noextension") // ""
|
|
50
|
+
* ```
|
|
51
|
+
*/
|
|
52
|
+
export function getExtension(fileName: string): string {
|
|
53
|
+
const lastDotIndex = fileName.lastIndexOf(".");
|
|
54
|
+
if (lastDotIndex === -1 || lastDotIndex === 0) {
|
|
55
|
+
return "";
|
|
56
|
+
}
|
|
57
|
+
return fileName.substring(lastDotIndex + 1);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Builds a naming context from file and flow execution information.
|
|
62
|
+
*
|
|
63
|
+
* @param file - The UploadFile being processed
|
|
64
|
+
* @param flowContext - Flow execution context (flowId, jobId, nodeId, nodeType)
|
|
65
|
+
* @param extraVars - Additional variables to include (width, height, format, etc.)
|
|
66
|
+
* @returns Complete naming context for template interpolation
|
|
67
|
+
*
|
|
68
|
+
* @example
|
|
69
|
+
* ```typescript
|
|
70
|
+
* const context = buildNamingContext(
|
|
71
|
+
* uploadFile,
|
|
72
|
+
* { flowId: "flow-1", jobId: "job-1", nodeId: "resize-1", nodeType: "resize" },
|
|
73
|
+
* { width: 800, height: 600 }
|
|
74
|
+
* );
|
|
75
|
+
* // context.baseName = "photo"
|
|
76
|
+
* // context.extension = "jpg"
|
|
77
|
+
* // context.width = 800
|
|
78
|
+
* // context.height = 600
|
|
79
|
+
* ```
|
|
80
|
+
*/
|
|
81
|
+
export function buildNamingContext(
|
|
82
|
+
file: UploadFile,
|
|
83
|
+
flowContext: {
|
|
84
|
+
flowId: string;
|
|
85
|
+
jobId: string;
|
|
86
|
+
nodeId: string;
|
|
87
|
+
nodeType: string;
|
|
88
|
+
},
|
|
89
|
+
extraVars?: Record<string, string | number | undefined>,
|
|
90
|
+
): NamingContext {
|
|
91
|
+
// Extract fileName from metadata
|
|
92
|
+
const metadata = file.metadata ?? {};
|
|
93
|
+
const fileName =
|
|
94
|
+
(metadata.fileName as string) ??
|
|
95
|
+
(metadata.originalName as string) ??
|
|
96
|
+
(metadata.name as string) ??
|
|
97
|
+
"unnamed";
|
|
98
|
+
|
|
99
|
+
const baseName = getBaseName(fileName);
|
|
100
|
+
const extension = getExtension(fileName);
|
|
101
|
+
|
|
102
|
+
return {
|
|
103
|
+
baseName,
|
|
104
|
+
extension,
|
|
105
|
+
fileName,
|
|
106
|
+
nodeType: flowContext.nodeType,
|
|
107
|
+
nodeId: flowContext.nodeId,
|
|
108
|
+
flowId: flowContext.flowId,
|
|
109
|
+
jobId: flowContext.jobId,
|
|
110
|
+
timestamp: new Date().toISOString(),
|
|
111
|
+
...extraVars,
|
|
112
|
+
};
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
/**
|
|
116
|
+
* Interpolates a mustache-style template with the given context.
|
|
117
|
+
*
|
|
118
|
+
* Uses micromustache for fast, secure template rendering.
|
|
119
|
+
* Unknown variables are preserved as-is (e.g., {{unknown}} stays {{unknown}}).
|
|
120
|
+
*
|
|
121
|
+
* @param pattern - Mustache-style template string
|
|
122
|
+
* @param context - Variables to interpolate
|
|
123
|
+
* @returns Interpolated string
|
|
124
|
+
*
|
|
125
|
+
* @example
|
|
126
|
+
* ```typescript
|
|
127
|
+
* interpolateFileName(
|
|
128
|
+
* "{{baseName}}-{{width}}x{{height}}.{{extension}}",
|
|
129
|
+
* { baseName: "photo", width: 800, height: 600, extension: "jpg" }
|
|
130
|
+
* );
|
|
131
|
+
* // Returns: "photo-800x600.jpg"
|
|
132
|
+
* ```
|
|
133
|
+
*/
|
|
134
|
+
export function interpolateFileName(
|
|
135
|
+
pattern: string,
|
|
136
|
+
context: NamingContext,
|
|
137
|
+
): string {
|
|
138
|
+
try {
|
|
139
|
+
// Convert context to string values for micromustache
|
|
140
|
+
const stringContext: Record<string, string> = {};
|
|
141
|
+
for (const [key, value] of Object.entries(context)) {
|
|
142
|
+
if (value !== undefined) {
|
|
143
|
+
stringContext[key] = String(value);
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
return render(pattern, stringContext);
|
|
147
|
+
} catch {
|
|
148
|
+
// On error, return the pattern as-is (fallback behavior)
|
|
149
|
+
return pattern;
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
/**
|
|
154
|
+
* Applies file naming configuration to generate a new filename.
|
|
155
|
+
*
|
|
156
|
+
* Handles three modes:
|
|
157
|
+
* - No config: Returns original filename (backward compatible)
|
|
158
|
+
* - Auto mode: Appends auto-generated suffix based on node type
|
|
159
|
+
* - Custom mode: Uses template pattern or rename function
|
|
160
|
+
*
|
|
161
|
+
* On any error, falls back to the original filename to prevent flow failures.
|
|
162
|
+
*
|
|
163
|
+
* @param file - The UploadFile being processed
|
|
164
|
+
* @param context - Naming context with all available variables
|
|
165
|
+
* @param config - Optional naming configuration
|
|
166
|
+
* @returns The new filename (or original on error/no config)
|
|
167
|
+
*
|
|
168
|
+
* @example
|
|
169
|
+
* ```typescript
|
|
170
|
+
* // Auto mode
|
|
171
|
+
* applyFileNaming(file, context, {
|
|
172
|
+
* mode: 'auto',
|
|
173
|
+
* autoSuffix: (ctx) => `${ctx.width}x${ctx.height}`
|
|
174
|
+
* });
|
|
175
|
+
* // Returns: "photo-800x600.jpg"
|
|
176
|
+
*
|
|
177
|
+
* // Custom mode with template
|
|
178
|
+
* applyFileNaming(file, context, {
|
|
179
|
+
* mode: 'custom',
|
|
180
|
+
* pattern: '{{baseName}}-processed.{{extension}}'
|
|
181
|
+
* });
|
|
182
|
+
* // Returns: "photo-processed.jpg"
|
|
183
|
+
*
|
|
184
|
+
* // Custom mode with function
|
|
185
|
+
* applyFileNaming(file, context, {
|
|
186
|
+
* mode: 'custom',
|
|
187
|
+
* rename: (file, ctx) => `${ctx.flowId}-${ctx.fileName}`
|
|
188
|
+
* });
|
|
189
|
+
* // Returns: "flow-1-photo.jpg"
|
|
190
|
+
* ```
|
|
191
|
+
*/
|
|
192
|
+
export function applyFileNaming(
|
|
193
|
+
file: UploadFile,
|
|
194
|
+
context: NamingContext,
|
|
195
|
+
config?: FileNamingConfig,
|
|
196
|
+
): string {
|
|
197
|
+
const originalFileName = context.fileName;
|
|
198
|
+
|
|
199
|
+
// No config = preserve original (backward compatible)
|
|
200
|
+
if (!config) {
|
|
201
|
+
return originalFileName;
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
try {
|
|
205
|
+
if (config.mode === "auto") {
|
|
206
|
+
// Auto mode: append auto-generated suffix
|
|
207
|
+
if (config.autoSuffix) {
|
|
208
|
+
const suffix = config.autoSuffix(context);
|
|
209
|
+
if (suffix) {
|
|
210
|
+
const { baseName, extension } = context;
|
|
211
|
+
return extension
|
|
212
|
+
? `${baseName}-${suffix}.${extension}`
|
|
213
|
+
: `${baseName}-${suffix}`;
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
// No autoSuffix defined, preserve original
|
|
217
|
+
return originalFileName;
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
if (config.mode === "custom") {
|
|
221
|
+
// Custom mode: use function or template
|
|
222
|
+
if (config.rename) {
|
|
223
|
+
const result = config.rename(file, context);
|
|
224
|
+
return result || originalFileName;
|
|
225
|
+
}
|
|
226
|
+
if (config.pattern) {
|
|
227
|
+
const result = interpolateFileName(config.pattern, context);
|
|
228
|
+
return result || originalFileName;
|
|
229
|
+
}
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
// Unknown mode, preserve original
|
|
233
|
+
return originalFileName;
|
|
234
|
+
} catch {
|
|
235
|
+
// On any error, fall back to original filename
|
|
236
|
+
return originalFileName;
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
/**
|
|
241
|
+
* Validates a template pattern for common issues.
|
|
242
|
+
*
|
|
243
|
+
* Checks for:
|
|
244
|
+
* - Balanced braces
|
|
245
|
+
* - Non-empty pattern
|
|
246
|
+
* - Valid variable names
|
|
247
|
+
*
|
|
248
|
+
* @param pattern - Template pattern to validate
|
|
249
|
+
* @returns Object with isValid flag and optional error message
|
|
250
|
+
*
|
|
251
|
+
* @example
|
|
252
|
+
* ```typescript
|
|
253
|
+
* validatePattern("{{baseName}}.{{extension}}");
|
|
254
|
+
* // { isValid: true }
|
|
255
|
+
*
|
|
256
|
+
* validatePattern("{{baseName");
|
|
257
|
+
* // { isValid: false, error: "Unbalanced braces: missing closing }}" }
|
|
258
|
+
* ```
|
|
259
|
+
*/
|
|
260
|
+
export function validatePattern(pattern: string): {
|
|
261
|
+
isValid: boolean;
|
|
262
|
+
error?: string;
|
|
263
|
+
} {
|
|
264
|
+
if (!pattern || pattern.trim() === "") {
|
|
265
|
+
return { isValid: false, error: "Pattern cannot be empty" };
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
// Check for balanced braces
|
|
269
|
+
const openCount = (pattern.match(/\{\{/g) || []).length;
|
|
270
|
+
const closeCount = (pattern.match(/\}\}/g) || []).length;
|
|
271
|
+
|
|
272
|
+
if (openCount !== closeCount) {
|
|
273
|
+
return {
|
|
274
|
+
isValid: false,
|
|
275
|
+
error: `Unbalanced braces: ${openCount} opening, ${closeCount} closing`,
|
|
276
|
+
};
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
// Check for valid variable syntax
|
|
280
|
+
const invalidVars = pattern.match(/\{\{[^}]*[^a-zA-Z0-9_}][^}]*\}\}/g);
|
|
281
|
+
if (invalidVars) {
|
|
282
|
+
return {
|
|
283
|
+
isValid: false,
|
|
284
|
+
error: `Invalid variable syntax: ${invalidVars[0]}`,
|
|
285
|
+
};
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
return { isValid: true };
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
/**
 * List of available template variables for documentation and UI.
 *
 * Mirrors the named fields of `NamingContext`; `as const` keeps the entries
 * readonly with literal types so consumers can derive unions from them.
 */
export const AVAILABLE_TEMPLATE_VARIABLES = [
  { name: "baseName", description: "Filename without extension", example: "photo" },
  { name: "extension", description: "File extension without dot", example: "jpg" },
  { name: "fileName", description: "Full original filename", example: "photo.jpg" },
  { name: "nodeType", description: "Type of processing node", example: "resize" },
  { name: "nodeId", description: "Specific node instance ID", example: "resize-1" },
  { name: "flowId", description: "Flow identifier", example: "flow-abc" },
  { name: "jobId", description: "Execution job ID", example: "job-123" },
  { name: "timestamp", description: "ISO 8601 processing time", example: "2024-01-15T10:30:00Z" },
  { name: "width", description: "Output width (image/video)", example: "800" },
  { name: "height", description: "Output height (image/video)", example: "600" },
  { name: "format", description: "Output format", example: "webp" },
  { name: "quality", description: "Quality setting", example: "80" },
  { name: "pageNumber", description: "Page number (documents)", example: "1" },
] as const;
|
package/dist/flow-CAywogte.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{n as e}from"./uploadista-error-B-kFH_SE.mjs";import{D as t,h as n,s as r,w as i}from"./types-CnhCQFkg.mjs";import{a,n as o,o as s}from"./upload-DPX3jSQH.mjs";import{Context as c,Effect as l,Layer as u,Option as d,Runtime as f,Stream as p}from"effect";import{z as m}from"zod";function h({source:e,target:t,sourcePort:n,targetPort:r}){return{source:e,target:t,sourcePort:n,targetPort:r}}let g=function(e){return e.JobStart=`job-start`,e.JobEnd=`job-end`,e.FlowStart=`flow-start`,e.FlowEnd=`flow-end`,e.FlowError=`flow-error`,e.FlowPause=`flow-pause`,e.FlowCancel=`flow-cancel`,e.NodeStart=`node-start`,e.NodeEnd=`node-end`,e.NodePause=`node-pause`,e.NodeResume=`node-resume`,e.NodeError=`node-error`,e.NodeStream=`node-stream`,e.NodeResponse=`node-response`,e}({});var _=class{types;constructor(){this.types=new Map}register(t){if(this.types.has(t.id))throw e.fromCode(`VALIDATION_ERROR`,{body:`Input type "${t.id}" is already registered. Types cannot be modified or re-registered.`,details:{typeId:t.id}});this.types.set(t.id,t)}get(e){return this.types.get(e)}list(){return Array.from(this.types.values())}validate(t,n){let r=this.types.get(t);if(!r)return{success:!1,error:e.fromCode(`VALIDATION_ERROR`,{body:`Input type "${t}" is not registered`,details:{typeId:t}})};try{return{success:!0,data:r.schema.parse(n)}}catch(n){return{success:!1,error:e.fromCode(`VALIDATION_ERROR`,{body:`Data validation failed for input type "${t}"`,cause:n,details:{typeId:t,validationErrors:n}})}}}has(e){return this.types.has(e)}size(){return this.types.size}};const v=new _;function y(e,t){return v.validate(e,t)}var b=class{types;constructor(){this.types=new Map}register(t){if(this.types.has(t.id))throw e.fromCode(`VALIDATION_ERROR`,{body:`Output type "${t.id}" is already registered. 
Types cannot be modified or re-registered.`,details:{typeId:t.id}});this.types.set(t.id,t)}get(e){return this.types.get(e)}list(){return Array.from(this.types.values())}validate(t,n){let r=this.types.get(t);if(!r)return{success:!1,error:e.fromCode(`VALIDATION_ERROR`,{body:`Output type "${t}" is not registered`,details:{typeId:t}})};try{return{success:!0,data:r.schema.parse(n)}}catch(n){return{success:!1,error:e.fromCode(`VALIDATION_ERROR`,{body:`Data validation failed for output type "${t}"`,cause:n,details:{typeId:t,validationErrors:n}})}}}has(e){return this.types.has(e)}size(){return this.types.size}};const x=new b;function S(e,t){return x.validate(e,t)}let C=function(e){return e.input=`input`,e.process=`process`,e.conditional=`conditional`,e.multiplex=`multiplex`,e.merge=`merge`,e}({});function w({id:t,name:n,description:r,type:i,inputSchema:a,outputSchema:o,run:s,condition:c,multiInput:u=!1,multiOutput:d=!1,pausable:f=!1,retry:p,inputTypeId:m,outputTypeId:h,keepOutput:g=!1}){return l.gen(function*(){return m&&!v.get(m)?yield*e.fromCode(`INVALID_INPUT_TYPE`,{body:`Input type "${m}" is not registered in inputTypeRegistry`,details:{inputTypeId:m,nodeId:t}}).toEffect():h&&!x.get(h)?yield*e.fromCode(`INVALID_OUTPUT_TYPE`,{body:`Output type "${h}" is not registered in outputTypeRegistry`,details:{outputTypeId:h,nodeId:t}}).toEffect():{id:t,name:n,description:r,type:i,inputTypeId:m,outputTypeId:h,keepOutput:g,inputSchema:a,outputSchema:o,pausable:f,run:({data:r,jobId:i,flowId:c,storageId:u,clientId:d})=>l.gen(function*(){let f=yield*s({data:yield*l.try({try:()=>a.parse(r),catch:r=>{let i=r instanceof Error?r.message:String(r);return e.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${t}) input validation failed: ${i}`,cause:r})}}),jobId:i,storageId:u,flowId:c,clientId:d});return f.type===`waiting`?{type:`waiting`,partialData:f.partialData,nodeType:h,nodeId:t}:{type:`complete`,data:yield*l.try({try:()=>o.parse(f.data),catch:r=>{let i=r instanceof 
Error?r.message:String(r);return e.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${t}) output validation failed: ${i}`,cause:r})}}),nodeType:h,nodeId:t}}),condition:c,multiInput:u,multiOutput:d,retry:p}})}const T=e=>({id:e.id,name:e.name,description:e.description,type:e.type,inputTypeId:e.inputTypeId,outputTypeId:e.outputTypeId}),E=e=>({type:`complete`,data:e}),D=e=>({type:`waiting`,partialData:e}),O=(e,t)=>{if(e===t)return!0;try{return!!(e&&t&&typeof e==`object`&&typeof t==`object`)}catch{return!0}};var k=class{typeChecker;constructor(e=O){this.typeChecker=e}validateConnection(e,t,n){return this.getCompatibleTypes(e.outputSchema,t.inputSchema)}getCompatibleTypes(e,t){return this.typeChecker(e,t)}validateFlow(e,t){let n=[],r=new Map(e.map(e=>[e.id,e]));for(let e of t){let t=r.get(e.source),i=r.get(e.target);if(!t){n.push(`Source node ${e.source} not found`);continue}if(!i){n.push(`Target node ${e.target} not found`);continue}this.validateConnection(t,i,e)||n.push(`Schema mismatch: ${t.id} output schema incompatible with ${i.id} input schema`)}return{isValid:n.length===0,errors:n}}getExpectedInputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.target===e){let e=r.get(t.source);if(e){let n=t.sourcePort||t.source;i[n]=e.outputSchema}}return i}getActualOutputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.source===e){let e=r.get(t.target);if(e){let n=t.targetPort||t.target;i[n]=e.inputSchema}}return i}validateData(e,t){try{return t.parse(e),{isValid:!0,errors:[]}}catch(e){return e instanceof Error&&`errors`in e?{isValid:!1,errors:e.errors.map(e=>`${e.path.join(`.`)}: ${e.message}`)}:{isValid:!1,errors:[e instanceof Error?e.message:`Validation failed`]}}}};function A(e){if(!e)return{type:``,fileName:``,metadata:void 0,metadataJson:void 0};let t={...e},n=String(t.type||t.mimeType||t[`content-type`]||``);n&&(t.type||=n,t.mimeType||=n);let r=String(t.fileName||t.originalName||t.name||``);return 
r&&(t.fileName||=r,t.originalName||=r,t.name||=r),{type:n,fileName:r,metadata:t,metadataJson:JSON.stringify(t)}}const ee=m.object({operation:m.literal(`init`),storageId:m.string(),metadata:m.record(m.string(),m.any()).optional()}),te=m.object({operation:m.literal(`finalize`),uploadId:m.string()}),ne=m.object({operation:m.literal(`url`),url:m.string(),storageId:m.string().optional(),metadata:m.record(m.string(),m.any()).optional()}),j=m.union([ee,te,ne]),re=m.object({allowedMimeTypes:m.array(m.string()).optional(),minSize:m.number().positive().optional(),maxSize:m.number().positive().optional()});function M(t,n){return l.gen(function*(){if(n){if(n.allowedMimeTypes&&n.allowedMimeTypes.length>0&&!n.allowedMimeTypes.some(e=>{if(e.endsWith(`/*`)){let n=e.slice(0,-2);return t.type.startsWith(n)}return t.type===e}))throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`File type "${t.type}" is not allowed. Allowed types: ${n.allowedMimeTypes.join(`, `)}`)}).toEffect();if(n.minSize!==void 0&&t.size<n.minSize)throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${t.size} bytes) is below minimum (${n.minSize} bytes)`)}).toEffect();if(n.maxSize!==void 0&&t.size>n.maxSize)throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${t.size} bytes) exceeds maximum (${n.maxSize} bytes)`)}).toEffect()}})}function ie(n,r,i){let c=i?.keepOutput??!1;return l.gen(function*(){let i=yield*o;return yield*w({id:n,name:`Input`,description:`Handles file input through multiple methods - streaming upload (init/finalize) or direct URL fetch`,type:C.input,inputSchema:j,outputSchema:t,keepOutput:c,inputTypeId:I,outputTypeId:N,run:({data:t,flowId:o,jobId:c,clientId:u})=>l.gen(function*(){switch(t.operation){case`init`:{let e={storageId:t.storageId,size:t.metadata?.size||0,type:t.metadata?.mimeType||`application/octet-stream`,fileName:t.metadata?.originalName,lastModified:t.metadata?.size?Date.now():void 0,metadata:t.metadata?JSON.stringify(t.metadata):void 
0,flow:{flowId:o,nodeId:n,jobId:c}};return D(yield*i.createUpload(e,u))}case`finalize`:{let e=yield*i.getUpload(t.uploadId),{type:n}=A(e.metadata);return yield*M({type:n,size:e.size||0},r),E(e)}case`url`:{let e=yield*s(t.url),l=yield*a(e),d=t.metadata?.mimeType||e.headers.get(`content-type`)||`application/octet-stream`,f=t.metadata?.size||Number(e.headers.get(`content-length`)||0),p=t.metadata?.originalName||t.url.split(`/`).pop()||`file`;yield*M({type:d,size:f},r);let m=new ReadableStream({start(e){e.enqueue(new Uint8Array(l)),e.close()}}),h={storageId:t.storageId||`buffer`,size:f,type:d,fileName:p,lastModified:Date.now(),metadata:t.metadata?JSON.stringify(t.metadata):void 0};return E({...yield*i.upload(h,u,m),flow:{flowId:o,nodeId:n,jobId:c}})}default:throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`Invalid operation`)}).toEffect()}})})})}const N=`storage-output-v1`,P=`ocr-output-v1`,F=`image-description-output-v1`,I=`streaming-input-v1`,L=m.object({extractedText:m.string(),format:m.enum([`markdown`,`plain`,`structured`]),taskType:m.enum([`convertToMarkdown`,`freeOcr`,`parseFigure`,`locateObject`]),confidence:m.number().min(0).max(1).optional()}),R=m.object({description:m.string(),confidence:m.number().min(0).max(1).optional(),metadata:m.record(m.string(),m.unknown()).optional()});v.register({id:I,schema:j,version:`1.0.0`,description:`Streaming file input with init/finalize/url operations for flexible file ingestion`}),x.register({id:N,schema:t,version:`1.0.0`,description:`Storage output node that saves files to configured storage backend`}),x.register({id:P,schema:L,version:`1.0.0`,description:`OCR output node that extracts structured text from documents using AI`}),x.register({id:F,schema:R,version:`1.0.0`,description:`Image description output node that generates AI-powered descriptions of images`});var z=class{maxConcurrency;constructor(e={}){this.maxConcurrency=e.maxConcurrency??4}groupNodesByExecutionLevel(e,t){let 
n={},r={};e.forEach(e=>{n[e.id]=[],r[e.id]=0}),t.forEach(e=>{n[e.source]?.push(e.target),r[e.target]=(r[e.target]||0)+1});let i=[],a=new Set,o=0;for(;a.size<e.length;){let e=Object.keys(r).filter(e=>r[e]===0&&!a.has(e));if(e.length===0)throw Error(`Cycle detected in flow graph - cannot execute in parallel`);i.push({level:o++,nodes:e}),e.forEach(e=>{a.add(e),delete r[e],n[e]?.forEach(e=>{r[e]!==void 0&&r[e]--})})}return i}executeNodesInParallel(e){return l.all(e.map(e=>e()),{concurrency:this.maxConcurrency})}canExecuteInParallel(e,t,n){return e.every(e=>(n[e]||[]).every(e=>t.has(e)))}getStats(){return{maxConcurrency:this.maxConcurrency}}};function B(e){return t=>{if(t.nodeType!==e)return!1;let n=x.get(e);return n?n.schema.safeParse(t.data).success:!1}}function V(e){return!e||typeof e!=`object`?!1:t.safeParse(e).success}const ae=B(`storage-output-v1`),oe=B(P),se=B(F);function H(e,t){return e.filter(t)}function ce(t,n){return l.gen(function*(){let r=H(t,n);return r.length===0?yield*e.fromCode(`OUTPUT_NOT_FOUND`,{body:`No output of the specified type was found in the flow results`}).toEffect():r.length>1?yield*e.fromCode(`MULTIPLE_OUTPUTS_FOUND`,{body:`Found ${r.length} outputs of the specified type, expected exactly one`,details:{foundCount:r.length,nodeIds:r.map(e=>e.nodeId)}}).toEffect():r[0]})}function le(e,t){return H(e,t)[0]}function ue(e,t){return e.find(e=>e.nodeId===t)}function de(e,t){return e.some(t)}function fe(e){return e.operation===`init`}function pe(e){return e.operation===`finalize`}function me(e){return e.operation===`url`}function he(e){return e.operation===`init`||e.operation===`url`}const U=e=>({id:e.id,name:e.name,nodes:e.nodes.map(T),edges:e.edges});function W(t){return l.gen(function*(){let n=yield*l.all(t.nodes.map(e=>l.isEffect(e)?e:l.succeed(e))),{flowId:r,name:a,onEvent:o,checkJobStatus:s,edges:c,inputSchema:u,outputSchema:d,typeChecker:f}=t,h=n,_=new k(f),v=()=>{let e={},t={},n={};return 
h.forEach(r=>{e[r.id]=[],n[r.id]=[],t[r.id]=0}),c.forEach(r=>{e[r.source]?.push(r.target),n[r.target]?.push(r.source),t[r.target]=(t[r.target]||0)+1}),{graph:e,reverseGraph:n,inDegree:t}},y=()=>{let{graph:e,inDegree:t}=v(),n=[],r=[];for(Object.keys(t).forEach(e=>{t[e]===0&&n.push(e)});n.length>0;){let i=n.shift();if(!i)throw Error(`No current node found`);r.push(i),e[i]?.forEach(e=>{t[e]=(t[e]||0)-1,t[e]===0&&n.push(e)})}return r},b=(e,t)=>{if(!e.condition)return l.succeed(!0);let{field:n,operator:r,value:i}=e.condition,a=t,o=a?.metadata?.[n]||a?.[n],s=(()=>{switch(r){case`equals`:return o===i;case`notEquals`:return o!==i;case`greaterThan`:return Number(o)>Number(i);case`lessThan`:return Number(o)<Number(i);case`contains`:return String(o).includes(String(i));case`startsWith`:return String(o).startsWith(String(i));default:return!0}})();return l.succeed(s)},x=(e,t)=>{let{reverseGraph:n}=v(),r=n[e]||[],i={};return r.forEach(e=>{let n=t.get(e);n!==void 0&&(i[e]=n)}),i},S=e=>{let t=h.filter(e=>e.type===`input`),n={};return t.forEach(t=>{e&&typeof e==`object`&&t.id in e&&(n[t.id]=u.parse(e[t.id]))}),n},C=e=>!c.some(t=>t.source===e),w=e=>{let t=h.find(t=>t.id===e);return C(e)||t?.keepOutput===!0},T=e=>{let t=h.filter(e=>w(e.id)),n={};return t.forEach(t=>{let r=e.get(t.id);r!==void 0&&(n[t.id]=r)}),n},E=(e,t)=>{let n=h.filter(e=>w(e.id)),r=[];return n.forEach(n=>{let i=e.get(n.id);if(i!==void 0){let e=t.get(n.id);r.push({nodeId:n.id,nodeType:e,data:i,timestamp:new Date().toISOString()})}}),r},D=(e,t,n)=>l.gen(function*(){if(e.storage.id===t)return e;let r=yield*i,a=yield*r.getDataStore(e.storage.id,n),o=yield*r.getDataStore(t,n),s=yield*a.read(e.id),c=p.make(s),l={...e,storage:{id:t,type:e.storage.type}},u=yield*o.create(l);return yield*o.write({file_id:u.id,stream:c,offset:0},{}),u}),O=(n,i,a,c,u,d,f)=>l.gen(function*(){let p=u.get(n);if(!p)return yield*e.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();if(s){let t=yield*s(d);if(t===`paused`)return 
yield*e.fromCode(`FLOW_PAUSED`,{cause:`Flow ${r} was paused by user at job ${d}`}).toEffect();if(t===`cancelled`)return yield*e.fromCode(`FLOW_CANCELLED`,{cause:`Flow ${r} was cancelled by user at job ${d}`}).toEffect()}o&&(yield*o({jobId:d,flowId:r,nodeId:n,eventType:g.NodeStart,nodeName:p.name,nodeType:p.type}));let m=p.retry?.maxRetries??0,h=p.retry?.retryDelay??1e3,_=p.retry?.exponentialBackoff??!0,v=0,y=null;for(;v<=m;)try{let s,u={};if(p.type===`input`){if(s=a[n],s===void 0)return yield*l.logError(`Input node ${n} has no input data`),yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Input node ${n} has no input data`)}).toEffect()}else{if(u=x(n,c),Object.keys(u).length===0)return yield*l.logError(`Node ${n} has no input data`),yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${n} has no input data`)}).toEffect();if(p.multiInput)s=u;else{let t=Object.keys(u)[0];if(!t)return yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${n} has no input data`)}).toEffect();s=u[t]}}if(p.type===`conditional`&&!(yield*b(p,s)))return o&&(yield*o({jobId:d,flowId:r,nodeId:n,eventType:g.NodeEnd,nodeName:p.name})),{nodeId:n,result:s,success:!0,waiting:!1};let m=yield*p.run({data:s,inputs:u,jobId:d,flowId:r,storageId:i,clientId:f});if(m.type===`waiting`){let e=m.partialData;return o&&(yield*o({jobId:d,flowId:r,nodeId:n,eventType:g.NodePause,nodeName:p.name,partialData:e})),{nodeId:n,result:e,success:!0,waiting:!0,nodeType:m.nodeType}}let h=m.data;if(w(n)&&(V(h)&&h.storage.id!==i&&(yield*l.logDebug(`Auto-persisting output node ${n} output from ${h.storage.id} to ${i}`),h=yield*D(h,i,f)),t.hooks?.onNodeOutput)){yield*l.logDebug(`Calling onNodeOutput hook for sink node ${n}`);let e=t.hooks.onNodeOutput({output:h,nodeId:n,flowId:r,jobId:d,storageId:i,clientId:f});h=yield*l.isEffect(e)?e:l.promise(()=>e)}return o&&(yield*o({jobId:d,flowId:r,nodeId:n,eventType:g.NodeEnd,nodeName:p.name,result:h})),{nodeId:n,result:h,success:!0,waiting:!1,nodeType:m.nodeType}}catch(t){if(y=t 
instanceof e?t:e.fromCode(`FLOW_NODE_ERROR`,{cause:t}),v<m){v++;let e=_?h*2**(v-1):h;yield*l.logWarning(`Node ${n} (${p.name}) failed, retrying (${v}/${m}) after ${e}ms`),yield*l.sleep(e);continue}return o&&(yield*o({jobId:d,flowId:r,nodeId:n,eventType:g.NodeError,nodeName:p.name,error:y.body,retryCount:v})),yield*y.toEffect()}return y?yield*y.toEffect():yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Unexpected error in retry loop`)}).toEffect()}),A=({inputs:n,storageId:i,jobId:a,resumeFrom:s,clientId:u})=>l.gen(function*(){!s&&o&&(yield*o({jobId:a,eventType:g.FlowStart,flowId:r}));let f=S(n||{}),p,_,v;s?(p=s.executionOrder,_=s.nodeResults,v=s.currentIndex):(p=y(),_=new Map,v=0);let b=new Map;if(p.length!==h.length)return yield*e.fromCode(`FLOW_CYCLE_ERROR`).toEffect();let x=new Map(h.map(e=>[e.id,e]));if(t.parallelExecution?.enabled??!1){yield*l.logDebug(`Flow ${r}: Executing in parallel mode (maxConcurrency: ${t.parallelExecution?.maxConcurrency??4})`);let e=new z({maxConcurrency:t.parallelExecution?.maxConcurrency??4}),n=e.groupNodesByExecutionLevel(h,c);yield*l.logDebug(`Flow ${r}: Grouped nodes into ${n.length} execution levels`);let d={};h.forEach(e=>{d[e.id]=[]}),c.forEach(e=>{d[e.target]?.push(e.source)});for(let t of n){yield*l.logDebug(`Flow ${r}: Executing level ${t.level} with nodes: ${t.nodes.join(`, `)}`);let n=t.nodes.map(e=>()=>l.gen(function*(){if(s&&e===s.executionOrder[v]&&o){let t=x.get(e);t&&(yield*o({jobId:a,flowId:r,nodeId:e,eventType:g.NodeResume,nodeName:t.name,nodeType:t.type}))}return{nodeId:e,nodeResult:yield*O(e,i,f,_,x,a,u)}})),c=yield*e.executeNodesInParallel(n);for(let{nodeId:e,nodeResult:t}of c){if(t.waiting)return t.result!==void 0&&(_.set(e,t.result),t.nodeType&&b.set(e,t.nodeType)),{type:`paused`,nodeId:e,executionState:{executionOrder:p,currentIndex:p.indexOf(e),inputs:f}};t.success&&(_.set(e,t.result),t.nodeType&&b.set(e,t.nodeType))}}}else{yield*l.logDebug(`Flow ${r}: Executing in sequential mode`);for(let 
t=v;t<p.length;t++){let n=p[t];if(!n)return yield*e.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();if(s&&t===v&&o){let e=x.get(n);e&&(yield*o({jobId:a,flowId:r,nodeId:n,eventType:g.NodeResume,nodeName:e.name,nodeType:e.type}))}let c=yield*O(n,i,f,_,x,a,u);if(c.waiting)return c.result!==void 0&&(_.set(c.nodeId,c.result),c.nodeType&&b.set(c.nodeId,c.nodeType)),{type:`paused`,nodeId:c.nodeId,executionState:{executionOrder:p,currentIndex:t,inputs:f}};c.success&&(_.set(c.nodeId,c.result),c.nodeType&&b.set(c.nodeId,c.nodeType))}}let C=T(_),D=E(_,b),k=m.record(m.string(),d).safeParse(C);if(!k.success){let t=`Flow output validation failed: ${k.error.message}. Expected outputs: ${JSON.stringify(Object.keys(T(_)))}. Output nodes (sinks + keepOutput): ${h.filter(e=>w(e.id)).map(e=>e.id).join(`, `)}`;return o&&(yield*o({jobId:a,eventType:g.FlowError,flowId:r,error:t})),yield*e.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:t,cause:k.error}).toEffect()}let A=k.data;return o&&(yield*o({jobId:a,eventType:g.FlowEnd,flowId:r,outputs:D,result:A})),{type:`completed`,result:A,outputs:D}});return{id:r,name:a,nodes:h,edges:c,inputSchema:u,outputSchema:d,onEvent:o,checkJobStatus:s,hooks:t.hooks,run:({inputs:e,storageId:t,jobId:n,clientId:r})=>A({inputs:e,storageId:t,jobId:n,clientId:r}),resume:({jobId:e,storageId:t,nodeResults:n,executionState:r,clientId:i})=>A({inputs:r.inputs,storageId:t,jobId:e,resumeFrom:{executionOrder:r.executionOrder,nodeResults:new Map(Object.entries(n)),currentIndex:r.currentIndex},clientId:i}),validateTypes:()=>{let e=h;return _.validateFlow(e,c)},validateInputs:e=>_.validateData(e,u),validateOutputs:e=>_.validateData(e,d)}})}var G=class e extends c.Tag(`FlowWaitUntil`)(){static optional=l.serviceOption(e)},K=class extends c.Tag(`FlowProvider`)(){},q=class extends c.Tag(`FlowServer`)(){};const J=e=>typeof e==`object`&&!!e&&`id`in e,ge=e=>typeof e==`object`&&e&&`nodeId`in e&&`data`in e&&`timestamp`in e?e.data:e;function Y(t,n,r){let i=e=>{let 
i=t=>l.gen(function*(){let n=yield*r.get(e);n&&(yield*r.set(e,{...n,...t,updatedAt:new Date}))});return a=>l.gen(function*(){switch(t.onEvent&&(yield*l.catchAll(t.onEvent(a),e=>(l.logError(`Original onEvent failed`,e),l.succeed({eventId:null})))),yield*n.emit(e,a),l.logInfo(`Updating job ${e} with event ${a.eventType}`),a.eventType){case g.FlowStart:yield*i({status:`running`});break;case g.FlowEnd:yield*l.gen(function*(){let t=yield*r.get(e);t&&a.outputs&&(yield*r.set(e,{...t,result:a.outputs,updatedAt:new Date}))});break;case g.FlowError:yield*i({status:`failed`,error:a.error});break;case g.NodeStart:yield*l.gen(function*(){let t=yield*r.get(e);if(t){let n=t.tasks.find(e=>e.nodeId===a.nodeId)?t.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`running`,updatedAt:new Date}:e):[...t.tasks,{nodeId:a.nodeId,status:`running`,createdAt:new Date,updatedAt:new Date}];yield*r.set(e,{...t,tasks:n,updatedAt:new Date})}});break;case g.NodePause:yield*l.gen(function*(){let t=yield*r.get(e);if(t){let n=t.tasks.find(e=>e.nodeId===a.nodeId)?t.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`paused`,result:a.partialData,updatedAt:new Date}:e):[...t.tasks,{nodeId:a.nodeId,status:`paused`,result:a.partialData,createdAt:new Date,updatedAt:new Date}];yield*r.set(e,{...t,tasks:n,updatedAt:new Date})}});break;case g.NodeResume:yield*l.gen(function*(){let t=yield*r.get(e);if(t){let n=t.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`running`,updatedAt:new Date}:e);yield*r.set(e,{...t,tasks:n,updatedAt:new Date})}});break;case g.NodeEnd:yield*l.gen(function*(){let n=yield*r.get(e);if(n){let i=n.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`completed`,result:a.result,updatedAt:new Date}:e),o=!t.edges.some(e=>e.source===a.nodeId),s=t.nodes.find(e=>e.id===a.nodeId)?.keepOutput===!0,c=a.result,u=ge(c),d=n.intermediateFiles||[],f=o||s;f&&J(u)&&u.id?(d=d.filter(e=>e!==u.id),s&&!o&&l.logInfo(`Preserving output from node ${a.nodeId} due to keepOutput 
flag`)):!f&&J(u)&&u.id&&(d.includes(u.id)||d.push(u.id)),yield*r.set(e,{...n,tasks:i,intermediateFiles:d,updatedAt:new Date})}});break;case g.NodeError:yield*l.gen(function*(){let t=yield*r.get(e);if(t){let n=t.tasks.map(e=>e.nodeId===a.nodeId?{...e,status:`failed`,error:a.error,retryCount:a.retryCount,updatedAt:new Date}:e);yield*r.set(e,{...t,tasks:n,error:a.error,updatedAt:new Date})}});break}return{eventId:e}})},a=t=>t=>l.gen(function*(){let n=yield*r.get(t);return n?n.status===`paused`?`paused`:n.status===`cancelled`?`cancelled`:`running`:yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}))});return{...t,run:e=>l.gen(function*(){let n=e.jobId||crypto.randomUUID(),r=i(n),o=a(n);return yield*(yield*W({flowId:t.id,name:t.name,nodes:t.nodes,edges:t.edges,inputSchema:t.inputSchema,outputSchema:t.outputSchema,onEvent:r,checkJobStatus:o})).run({...e,jobId:n,clientId:e.clientId})}),resume:e=>l.gen(function*(){let n=e.jobId,r=i(n),o=a(n);return yield*(yield*W({flowId:t.id,name:t.name,nodes:t.nodes,edges:t.edges,inputSchema:t.inputSchema,outputSchema:t.outputSchema,onEvent:r,checkJobStatus:o})).resume(e)})}}function X(){return l.gen(function*(){let t=yield*K,i=yield*n,a=yield*r,s=yield*o,c=(t,n)=>l.gen(function*(){let r=yield*a.get(t);return r?yield*a.set(t,{...r,...n}):yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}))}),u=(e,t)=>l.gen(function*(){let n=yield*a.get(e);!n||!n.intermediateFiles||n.intermediateFiles.length===0||(yield*l.logInfo(`Cleaning up ${n.intermediateFiles.length} intermediate files for job ${e}`),yield*l.all(n.intermediateFiles.map(e=>l.gen(function*(){yield*s.delete(e,t),yield*l.logDebug(`Deleted intermediate file ${e}`)}).pipe(l.catchAll(t=>l.gen(function*(){return yield*l.logWarning(`Failed to delete intermediate file ${e}: ${t}`),l.succeed(void 
0)})))),{concurrency:5}),yield*c(e,{intermediateFiles:[]}))}),p=({jobId:t,flow:n,storageId:r,clientId:o,inputs:s})=>l.gen(function*(){console.log(`[FlowServer] executeFlowInBackground started for job: ${t}`),yield*c(t,{status:`running`}),console.log(`[FlowServer] Creating flowWithEvents for job: ${t}`);let e=Y(n,i,a);console.log(`[FlowServer] Running flow for job: ${t}`);let l=yield*e.run({inputs:s,storageId:r,jobId:t,clientId:o});return console.log(`[FlowServer] Flow completed for job: ${t}, result type: ${l.type}`),l.type===`paused`?yield*c(t,{status:`paused`,pausedAt:l.nodeId,executionState:l.executionState,updatedAt:new Date}):(yield*c(t,{status:`completed`,updatedAt:new Date,endedAt:new Date}),yield*u(t,o)),l}).pipe(l.catchAll(n=>l.gen(function*(){yield*l.logError(`Flow execution failed`,n);let r=n instanceof e?n.body:String(n);yield*l.logInfo(`Updating job ${t} to failed status with error: ${r}`),yield*c(t,{status:`failed`,error:r,updatedAt:new Date}).pipe(l.catchAll(e=>l.gen(function*(){return yield*l.logError(`Failed to update job ${t}`,e),l.succeed(void 0)})));let s=yield*a.get(t);throw s&&(yield*i.emit(t,{jobId:t,eventType:g.FlowError,flowId:s.flowId,error:r}).pipe(l.catchAll(e=>l.gen(function*(){return yield*l.logError(`Failed to emit FlowError event for job ${t}`,e),l.succeed(void 0)})))),yield*u(t,o).pipe(l.catchAll(e=>l.gen(function*(){return yield*l.logWarning(`Failed to cleanup intermediate files for job ${t}`,e),l.succeed(void 0)}))),n})));return{getFlow:(e,n)=>l.gen(function*(){return yield*t.getFlow(e,n)}),getFlowData:(e,n)=>l.gen(function*(){return U(yield*t.getFlow(e,n))}),runFlow:({flowId:n,storageId:r,clientId:i,inputs:o})=>l.gen(function*(){let s=yield*G.optional,c=yield*l.try({try:()=>Ye.parse({inputs:o}),catch:t=>e.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:t})}),u=crypto.randomUUID(),m=new Date,h={id:u,flowId:n,storageId:r,clientId:i,status:`started`,createdAt:m,updatedAt:m,tasks:[]};yield*a.set(u,h);let 
g=yield*t.getFlow(n,i);console.log(`[FlowServer] About to fork flow execution for job: ${u}`);let _=p({jobId:u,flow:g,storageId:r,clientId:i,inputs:c.inputs}).pipe(l.tapErrorCause(e=>l.logError(`Flow execution failed`,e)));if(d.isSome(s)){console.log(`[FlowServer] Using waitUntil for job: ${u}`);let e=yield*l.runtime(),t=f.runPromise(e)(_);s.value(t)}else console.log(`[FlowServer] Using Effect.forkDaemon for job: ${u}`),yield*l.forkDaemon(_);return console.log(`[FlowServer] Flow execution started for job: ${u}`),h}),getJobStatus:t=>l.gen(function*(){return(yield*a.get(t))||(yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`})))}),resumeFlow:({jobId:n,nodeId:r,newData:o,clientId:s})=>l.gen(function*(){let p=yield*G.optional,m=yield*a.get(n);if(!m)return console.error(`Job not found`),yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${n} not found`}));if(m.status!==`paused`)return console.error(`Job is not paused`),yield*l.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} is not paused (status: ${m.status})`}));if(m.pausedAt!==r)return console.error(`Job is not paused at the expected node`),yield*l.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} is paused at node ${m.pausedAt}, not ${r}`}));if(!m.executionState)return console.error(`Job has no execution state`),yield*l.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} has no execution state`}));let h={...m.tasks.reduce((e,t)=>(t.result!==void 0&&(e[t.nodeId]=t.result),e),{}),[r]:o},_={...m.executionState.inputs,[r]:o};yield*c(n,{status:`running`});let v=yield*t.getFlow(m.flowId,m.clientId),y=l.gen(function*(){let t=Y(v,i,a);if(!m.executionState)return yield*l.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} has no execution state`}));let r=yield*t.resume({jobId:n,storageId:m.storageId,nodeResults:h,executionState:{...m.executionState,inputs:_},clientId:m.clientId});return r.type===`paused`?yield*c(n,{status:`paused`,pausedAt:r.nodeId,executionState:r.executionState,updatedAt:new 
Date}):(yield*c(n,{status:`completed`,pausedAt:void 0,executionState:void 0,updatedAt:new Date,endedAt:new Date}),yield*u(n,s)),r}).pipe(l.catchAll(t=>l.gen(function*(){yield*l.logError(`Flow resume failed`,t);let r=t instanceof e?t.body:String(t);yield*l.logInfo(`Updating job ${n} to failed status with error: ${r}`),yield*c(n,{status:`failed`,error:r,updatedAt:new Date}).pipe(l.catchAll(e=>l.gen(function*(){return yield*l.logError(`Failed to update job ${n}`,e),l.succeed(void 0)})));let o=yield*a.get(n);throw o&&(yield*i.emit(n,{jobId:n,eventType:g.FlowError,flowId:o.flowId,error:r}).pipe(l.catchAll(e=>l.gen(function*(){return yield*l.logError(`Failed to emit FlowError event for job ${n}`,e),l.succeed(void 0)})))),yield*u(n,s).pipe(l.catchAll(e=>l.gen(function*(){return yield*l.logWarning(`Failed to cleanup intermediate files for job ${n}`,e),l.succeed(void 0)}))),t}))).pipe(l.tapErrorCause(e=>l.logError(`Flow resume failed`,e)));if(d.isSome(p)){console.log(`[FlowServer] Using waitUntil for resume job: ${n}`);let e=yield*l.runtime(),t=f.runPromise(e)(y);p.value(t)}else console.log(`[FlowServer] Using Effect.forkDaemon for resume job: ${n}`),yield*l.forkDaemon(y);return(yield*a.get(n))||(yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${n} not found after update`})))}),pauseFlow:(t,n)=>l.gen(function*(){let r=yield*a.get(t);if(!r)return yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}));if(n!==null&&r.clientId!==n)return yield*l.fail(e.fromCode(`FLOW_NOT_AUTHORIZED`,{cause:`Client ${n} is not authorized to pause job ${t}`}));if(r.status!==`running`)return yield*l.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} cannot be paused (current status: ${r.status})`}));let o=r.tasks.find(e=>e.status===`running`)?.nodeId;return yield*c(t,{status:`paused`,pausedAt:o,updatedAt:new Date}),yield*i.emit(t,{jobId:t,flowId:r.flowId,eventType:g.FlowPause,pausedAt:o}),(yield*a.get(t))||(yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} 
not found after pause`})))}),cancelFlow:(t,n)=>l.gen(function*(){let r=yield*a.get(t);return r?n!==null&&r.clientId!==n?yield*l.fail(e.fromCode(`FLOW_NOT_AUTHORIZED`,{cause:`Client ${n} is not authorized to cancel job ${t}`})):r.status!==`running`&&r.status!==`paused`&&r.status!==`started`?yield*l.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} cannot be cancelled (current status: ${r.status})`})):(yield*c(t,{status:`cancelled`,updatedAt:new Date,endedAt:new Date}),yield*i.emit(t,{jobId:t,flowId:r.flowId,eventType:g.FlowCancel}),yield*u(t,n),(yield*a.get(t))||(yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found after cancellation`})))):yield*l.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}))}),subscribeToFlowEvents:(e,t)=>l.gen(function*(){yield*i.subscribe(e,t)}),unsubscribeFromFlowEvents:e=>l.gen(function*(){yield*i.unsubscribe(e)})}})}const _e=u.effect(q,X());function ve({id:e,name:n,description:r,outputTypeId:i,keepOutput:a,transform:s}){return l.gen(function*(){let c=yield*o;return yield*w({id:e,name:n,description:r,type:C.process,outputTypeId:i,keepOutput:a,inputSchema:t,outputSchema:t,run:({data:t,storageId:n,flowId:r,jobId:i,clientId:a})=>l.gen(function*(){let o={flowId:r,nodeId:e,jobId:i},l=yield*s(yield*c.read(t.id,a),t),u=l instanceof Uint8Array?l:l.bytes,d=l instanceof Uint8Array?void 0:l.type,f=l instanceof Uint8Array?void 0:l.fileName,p=new ReadableStream({start(e){e.enqueue(u),e.close()}}),{type:m,fileName:h,metadata:g,metadataJson:_}=A(t.metadata),v=yield*c.upload({storageId:n,size:u.byteLength,type:d??m,fileName:f??h,lastModified:0,metadata:_,flow:o},a,p),y=g?{...g,...d&&{mimeType:d,type:d,"content-type":d},...f&&{fileName:f,originalName:f,name:f,extension:f.split(`.`).pop()||g.extension}}:v.metadata;return E(y?{...v,metadata:y}:v)})})})}var ye=class extends c.Tag(`CredentialProvider`)(){},be=class extends c.Tag(`DocumentAiPlugin`)(){},xe=class extends c.Tag(`DocumentPlugin`)(){},Se=class extends 
c.Tag(`ImageAiPlugin`)(){},Ce=class extends c.Tag(`ImagePlugin`)(){};const we=m.object({serviceType:m.enum([`replicate`]).optional()}),Te=m.object({duration:m.number().nonnegative(),width:m.number().positive(),height:m.number().positive(),codec:m.string(),format:m.string(),bitrate:m.number().nonnegative(),frameRate:m.number().positive(),aspectRatio:m.string(),hasAudio:m.boolean(),audioCodec:m.string().optional(),audioBitrate:m.number().nonnegative().optional(),size:m.number().nonnegative()}),Ee=m.object({timestamp:m.number().nonnegative(),format:m.enum([`png`,`jpeg`]).optional(),quality:m.number().min(1).max(100).optional()}),De=m.object({quality:m.number().min(0).max(100),format:m.enum([`jpeg`,`webp`,`png`,`avif`])}),Oe=m.object({serviceType:m.enum([`replicate`]).optional()}),ke=m.object({width:m.number().positive().optional(),height:m.number().positive().optional(),fit:m.enum([`contain`,`cover`,`fill`])}).refine(e=>e.width||e.height,`Either width or height must be specified for resize`),Ae=m.object({width:m.number().positive().optional(),height:m.number().positive().optional(),aspectRatio:m.enum([`keep`,`ignore`]).optional(),scaling:m.enum([`bicubic`,`bilinear`,`lanczos`]).optional()}).refine(e=>e.width||e.height,`Either width or height must be specified for video 
resize`),je=m.object({format:m.enum([`mp4`,`webm`,`mov`,`avi`]),codec:m.enum([`h264`,`h265`,`vp9`,`av1`]).optional(),videoBitrate:m.string().optional(),audioBitrate:m.string().optional(),audioCodec:m.enum([`aac`,`mp3`,`opus`,`vorbis`]).optional()}),Z=m.object({type:m.literal(`resize`),width:m.number().positive().optional(),height:m.number().positive().optional(),fit:m.enum([`contain`,`cover`,`fill`])}),Q=m.object({type:m.literal(`blur`),sigma:m.number().min(.3).max(1e3)}),Me=m.object({type:m.literal(`rotate`),angle:m.number(),background:m.string().optional()}),Ne=m.object({type:m.literal(`flip`),direction:m.enum([`horizontal`,`vertical`])}),Pe=m.object({type:m.literal(`grayscale`)}),Fe=m.object({type:m.literal(`sepia`)}),Ie=m.object({type:m.literal(`brightness`),value:m.number().min(-100).max(100)}),Le=m.object({type:m.literal(`contrast`),value:m.number().min(-100).max(100)}),$=m.object({type:m.literal(`sharpen`),sigma:m.number().positive().optional()}),Re=m.object({type:m.literal(`watermark`),imagePath:m.string().min(1).url(),position:m.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),opacity:m.number().min(0).max(1),offsetX:m.number().optional(),offsetY:m.number().optional()}),ze=m.object({type:m.literal(`logo`),imagePath:m.string().min(1).url(),position:m.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),scale:m.number().min(.1).max(2),offsetX:m.number().optional(),offsetY:m.number().optional()}),Be=m.object({type:m.literal(`text`),text:m.string().min(1),position:m.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),fontSize:m.number().positive(),color:m.string().min(1),fontFamily:m.string().optional(),offsetX:m.number().optional(),offsetY:m.number().optional()}),Ve=m.discriminatedUnion(`type`,[Z,Q,Me,Ne,Pe,Fe,Ie,Le,$,Re,ze,Be]),He=m.object({transformations:m.array(Ve).min(1)}),Ue=m.object({startTime:m.number().nonnegative(),endTime:m.number().positive().optional(),duration:m.number().positive().optional()})
.refine(e=>!e.endTime||!e.duration,`Cannot specify both endTime and duration`).refine(e=>!e.endTime||e.endTime>e.startTime,`endTime must be greater than startTime`);var We=class extends c.Tag(`VideoPlugin`)(){},Ge=class extends c.Tag(`VirusScanPlugin`)(){},Ke=class extends c.Tag(`ZipPlugin`)(){};const qe=(e,t)=>{if(e.length===0)return t;let[n,...r]=e;return r.reduce((e,t)=>m.union([e,t]),n)};function Je(t){return l.gen(function*(){let n=Object.entries(t.nodes),r=e=>l.isEffect(e)?e:l.succeed(e),i=yield*l.forEach(n,([t,n])=>l.flatMap(r(n),n=>n.id===t?l.succeed([t,n]):l.fail(e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node key ${t} does not match node id ${n.id}`)})))),a=Object.fromEntries(i),o=i.map(([,e])=>e),s=i.filter(([,e])=>e.type===C.input).map(([,e])=>e.inputSchema),c=t.edges.map(e=>({source:a[e.source]?.id??e.source,target:a[e.target]?.id??e.target,sourcePort:e.sourcePort,targetPort:e.targetPort})),u=new Set(i.map(([e])=>a[e]?.id).filter(e=>e&&!c.some(t=>t.source===e))),d=i.filter(([,e])=>u.has(e.id)).map(([,e])=>e.outputSchema),f=t.inputSchema??qe(s,m.unknown()),p=t.outputSchema??qe(d,m.unknown());return yield*W({flowId:t.flowId,name:t.name,nodes:o,edges:c,inputSchema:f,outputSchema:p,typeChecker:t.typeChecker,onEvent:t.onEvent,parallelExecution:t.parallelExecution,hooks:t.hooks})})}const Ye=m.object({inputs:m.record(m.string(),m.any())});export{V as $,Se as A,W as B,ke as C,y as Ct,Te as D,Ee as E,K as F,ue as G,B as H,q as I,pe as J,ce as K,G as L,be as M,ye as N,we as O,ve as P,ae as Q,X as R,Ae as S,v as St,De as T,h as Tt,H as U,U as V,le as W,fe as X,se as Y,oe as Z,Be as _,T as _t,We as a,N as at,Re as b,S as bt,Ie as c,L as ct,Pe as d,re as dt,he as et,ze as f,A as ft,$ as g,w as gt,Fe as h,C as ht,Ge as i,P as it,xe as j,Ce as k,Le as l,ie as lt,Me as m,D as mt,Je as n,z as nt,Ue as o,I as ot,Z as p,E as pt,de as q,Ke as r,F as rt,Q as s,R as st,Ye as t,me as tt,Ne as u,j as ut,He as v,b as vt,Oe as w,g as wt,je as x,_ as xt,Ve as y,x as yt,_e as z};
|
|
2
|
-
//# sourceMappingURL=flow-CAywogte.mjs.map
|