@uploadista/core 0.0.13 → 0.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97)
  1. package/dist/{checksum-CtOagryS.mjs → checksum-BaO9w1gC.mjs} +2 -2
  2. package/dist/{checksum-CtOagryS.mjs.map → checksum-BaO9w1gC.mjs.map} +1 -1
  3. package/dist/{checksum-jmKtZ9W8.cjs → checksum-DXCv7Avr.cjs} +1 -1
  4. package/dist/errors/index.cjs +1 -1
  5. package/dist/errors/index.d.cts +1 -1
  6. package/dist/errors/index.d.mts +1 -1
  7. package/dist/errors/index.mjs +1 -1
  8. package/dist/flow/index.cjs +1 -1
  9. package/dist/flow/index.d.cts +5 -5
  10. package/dist/flow/index.d.mts +5 -5
  11. package/dist/flow/index.mjs +1 -1
  12. package/dist/flow-DhuIQwjv.mjs +2 -0
  13. package/dist/flow-DhuIQwjv.mjs.map +1 -0
  14. package/dist/flow-s_AlC4r5.cjs +1 -0
  15. package/dist/{index-Bi9YYid8.d.mts → index-3jSHmGwH.d.mts} +2 -2
  16. package/dist/{index-Bi9YYid8.d.mts.map → index-3jSHmGwH.d.mts.map} +1 -1
  17. package/dist/{index-4VDJDcWM.d.cts → index-5K4oXy67.d.cts} +822 -169
  18. package/dist/index-5K4oXy67.d.cts.map +1 -0
  19. package/dist/{index-RgOX4psL.d.mts → index-BB1v4Ynz.d.mts} +822 -169
  20. package/dist/index-BB1v4Ynz.d.mts.map +1 -0
  21. package/dist/{index-Cbf1OPLp.d.mts → index-Bu5i-gcV.d.mts} +2 -2
  22. package/dist/index-Bu5i-gcV.d.mts.map +1 -0
  23. package/dist/{index-De4wQJwR.d.cts → index-CHGBYDtr.d.cts} +2 -2
  24. package/dist/{index-De4wQJwR.d.cts.map → index-CHGBYDtr.d.cts.map} +1 -1
  25. package/dist/{index-qZ90PVNl.d.cts → index-T6MZvUlM.d.cts} +2 -2
  26. package/dist/{index-Cbf1OPLp.d.mts.map → index-T6MZvUlM.d.cts.map} +1 -1
  27. package/dist/index.cjs +1 -1
  28. package/dist/index.d.cts +5 -5
  29. package/dist/index.d.mts +5 -5
  30. package/dist/index.mjs +1 -1
  31. package/dist/{stream-limiter-D9rrsvAT.cjs → stream-limiter-BcTJAjs-.cjs} +1 -1
  32. package/dist/{stream-limiter-D9KSAaoY.mjs → stream-limiter-D1-sVS5i.mjs} +2 -2
  33. package/dist/{stream-limiter-D9KSAaoY.mjs.map → stream-limiter-D1-sVS5i.mjs.map} +1 -1
  34. package/dist/streams/index.cjs +1 -1
  35. package/dist/streams/index.d.cts +2 -2
  36. package/dist/streams/index.d.mts +2 -2
  37. package/dist/streams/index.mjs +1 -1
  38. package/dist/testing/index.cjs +1 -1
  39. package/dist/testing/index.d.cts +4 -4
  40. package/dist/testing/index.d.mts +4 -4
  41. package/dist/testing/index.mjs +1 -1
  42. package/dist/types/index.cjs +1 -1
  43. package/dist/types/index.d.cts +4 -4
  44. package/dist/types/index.d.mts +4 -4
  45. package/dist/types/index.mjs +1 -1
  46. package/dist/types-B-EckCWW.cjs +1 -0
  47. package/dist/types-CO-R4pFG.mjs +2 -0
  48. package/dist/types-CO-R4pFG.mjs.map +1 -0
  49. package/dist/upload/index.cjs +1 -1
  50. package/dist/upload/index.d.cts +4 -4
  51. package/dist/upload/index.d.mts +4 -4
  52. package/dist/upload/index.mjs +1 -1
  53. package/dist/{upload-D-eiOIVG.cjs → upload-BwXGQQ26.cjs} +1 -1
  54. package/dist/upload-C_Ew1NMF.mjs +2 -0
  55. package/dist/{upload-Yj5lrtZo.mjs.map → upload-C_Ew1NMF.mjs.map} +1 -1
  56. package/dist/{uploadista-error-B-n8Kfyh.cjs → uploadista-error-Blmj3lpk.cjs} +5 -1
  57. package/dist/{uploadista-error-DUWw6OqS.d.mts → uploadista-error-Cpn3uBLO.d.mts} +2 -2
  58. package/dist/uploadista-error-Cpn3uBLO.d.mts.map +1 -0
  59. package/dist/{uploadista-error-BQLhNZcY.d.cts → uploadista-error-DgdQnozn.d.cts} +2 -2
  60. package/dist/uploadista-error-DgdQnozn.d.cts.map +1 -0
  61. package/dist/{uploadista-error-Buscq-FR.mjs → uploadista-error-DhNBioWq.mjs} +5 -1
  62. package/dist/uploadista-error-DhNBioWq.mjs.map +1 -0
  63. package/dist/utils/index.cjs +1 -1
  64. package/dist/utils/index.d.cts +2 -2
  65. package/dist/utils/index.d.mts +2 -2
  66. package/dist/utils/index.mjs +1 -1
  67. package/dist/{utils-BWiu6lqv.mjs → utils-7gziergl.mjs} +2 -2
  68. package/dist/{utils-BWiu6lqv.mjs.map → utils-7gziergl.mjs.map} +1 -1
  69. package/dist/{utils-_StwBtxT.cjs → utils-C_STf6Wl.cjs} +1 -1
  70. package/package.json +3 -3
  71. package/src/errors/uploadista-error.ts +21 -1
  72. package/src/flow/event.ts +28 -4
  73. package/src/flow/flow-server.ts +43 -12
  74. package/src/flow/flow.ts +92 -13
  75. package/src/flow/index.ts +7 -0
  76. package/src/flow/node-types/index.ts +85 -0
  77. package/src/flow/node.ts +48 -6
  78. package/src/flow/nodes/input-node.ts +2 -0
  79. package/src/flow/nodes/storage-node.ts +2 -0
  80. package/src/flow/type-guards.ts +293 -0
  81. package/src/flow/type-registry.ts +345 -0
  82. package/src/flow/types/flow-job.ts +22 -6
  83. package/src/flow/types/flow-types.ts +152 -3
  84. package/tests/flow/type-system.test.ts +799 -0
  85. package/dist/flow-ChADffZ5.cjs +0 -1
  86. package/dist/flow-_J9-Dm_m.mjs +0 -2
  87. package/dist/flow-_J9-Dm_m.mjs.map +0 -1
  88. package/dist/index-4VDJDcWM.d.cts.map +0 -1
  89. package/dist/index-RgOX4psL.d.mts.map +0 -1
  90. package/dist/index-qZ90PVNl.d.cts.map +0 -1
  91. package/dist/types-BI_KmpTc.mjs +0 -2
  92. package/dist/types-BI_KmpTc.mjs.map +0 -1
  93. package/dist/types-f08UsX4E.cjs +0 -1
  94. package/dist/upload-Yj5lrtZo.mjs +0 -2
  95. package/dist/uploadista-error-BQLhNZcY.d.cts.map +0 -1
  96. package/dist/uploadista-error-Buscq-FR.mjs.map +0 -1
  97. package/dist/uploadista-error-DUWw6OqS.d.mts.map +0 -1
package/src/flow/flow.ts CHANGED
@@ -20,7 +20,12 @@ import type { FlowEdge } from "./edge";
20
20
  import { EventType } from "./event";
21
21
  import { getNodeData } from "./node";
22
22
  import { ParallelScheduler } from "./parallel-scheduler";
23
- import type { FlowConfig, FlowNode, FlowNodeData } from "./types/flow-types";
23
+ import type {
24
+ FlowConfig,
25
+ FlowNode,
26
+ FlowNodeData,
27
+ TypedOutput,
28
+ } from "./types/flow-types";
24
29
  import { FlowTypeValidator } from "./types/type-validator";
25
30
 
26
31
  /**
@@ -87,7 +92,11 @@ export const getFlowData = <TRequirements>(
87
92
  * ```
88
93
  */
89
94
  export type FlowExecutionResult<TOutput> =
90
- | { type: "completed"; result: TOutput }
95
+ | {
96
+ type: "completed";
97
+ result: TOutput;
98
+ outputs?: TypedOutput[]; // Typed outputs from all output nodes with registered types
99
+ }
91
100
  | {
92
101
  type: "paused";
93
102
  nodeId: string;
@@ -458,6 +467,33 @@ export function createFlowWithSchema<
458
467
  return flowOutputs as Record<string, z.infer<TFlowInputSchema>>;
459
468
  };
460
469
 
470
+ // Collect typed outputs from output nodes with metadata
471
+ const collectTypedOutputs = (
472
+ nodeResults: Map<string, unknown>,
473
+ nodeTypesMap: Map<string, string>,
474
+ ): TypedOutput[] => {
475
+ const outputNodes = nodes.filter((node: any) => node.type === "output");
476
+ const typedOutputs: TypedOutput[] = [];
477
+
478
+ outputNodes.forEach((node: any) => {
479
+ const result = nodeResults.get(node.id);
480
+ if (result !== undefined) {
481
+ // Get the nodeType from the nodeTypes map
482
+ const nodeTypeId = nodeTypesMap.get(node.id);
483
+
484
+ // Create TypedOutput with metadata
485
+ typedOutputs.push({
486
+ nodeId: node.id,
487
+ nodeType: nodeTypeId,
488
+ data: result,
489
+ timestamp: new Date().toISOString(),
490
+ });
491
+ }
492
+ });
493
+
494
+ return typedOutputs;
495
+ };
496
+
461
497
  // Execute a single node using Effect
462
498
  const executeNode = (
463
499
  nodeId: string,
@@ -468,7 +504,13 @@ export function createFlowWithSchema<
468
504
  jobId: string,
469
505
  clientId: string | null,
470
506
  ): Effect.Effect<
471
- { nodeId: string; result: unknown; success: boolean; waiting: boolean },
507
+ {
508
+ nodeId: string;
509
+ result: unknown;
510
+ success: boolean;
511
+ waiting: boolean;
512
+ nodeType?: string;
513
+ },
472
514
  UploadistaError
473
515
  > => {
474
516
  return Effect.gen(function* () {
@@ -615,6 +657,7 @@ export function createFlowWithSchema<
615
657
  result,
616
658
  success: true,
617
659
  waiting: true,
660
+ nodeType: executionResult.nodeType,
618
661
  };
619
662
  }
620
663
 
@@ -633,7 +676,13 @@ export function createFlowWithSchema<
633
676
  });
634
677
  }
635
678
 
636
- return { nodeId, result, success: true, waiting: false };
679
+ return {
680
+ nodeId,
681
+ result,
682
+ success: true,
683
+ waiting: false,
684
+ nodeType: executionResult.nodeType,
685
+ };
637
686
  } catch (error) {
638
687
  // Store the error
639
688
  lastError =
@@ -712,6 +761,7 @@ export function createFlowWithSchema<
712
761
  | {
713
762
  type: "completed";
714
763
  result: Record<string, z.infer<TFlowOutputSchema>>;
764
+ outputs?: TypedOutput[];
715
765
  }
716
766
  | {
717
767
  type: "paused";
@@ -752,13 +802,22 @@ export function createFlowWithSchema<
752
802
  executionOrder = topologicalSort();
753
803
  nodeResults = new Map<string, unknown>();
754
804
  startIndex = 0;
805
+ }
755
806
 
756
- // Check for cycles
757
- if (executionOrder.length !== nodes.length) {
758
- return yield* UploadistaError.fromCode(
759
- "FLOW_CYCLE_ERROR",
760
- ).toEffect();
761
- }
807
+ // Track nodeTypes for typed outputs
808
+ const nodeTypes = new Map<string, string>();
809
+
810
+ // If resuming, restore any nodeTypes from previous execution
811
+ if (resumeFrom) {
812
+ // nodeTypes would need to be restored from job state if implementing pause/resume
813
+ // For now, fresh starts only track types going forward
814
+ }
815
+
816
+ // Check for cycles
817
+ if (executionOrder.length !== nodes.length) {
818
+ return yield* UploadistaError.fromCode(
819
+ "FLOW_CYCLE_ERROR",
820
+ ).toEffect();
762
821
  }
763
822
 
764
823
  // Create node map for quick lookup
@@ -849,6 +908,9 @@ export function createFlowWithSchema<
849
908
  // Node is waiting - pause execution and return state
850
909
  if (nodeResult.result !== undefined) {
851
910
  nodeResults.set(nodeId, nodeResult.result);
911
+ if (nodeResult.nodeType) {
912
+ nodeTypes.set(nodeId, nodeResult.nodeType);
913
+ }
852
914
  }
853
915
 
854
916
  return {
@@ -864,6 +926,9 @@ export function createFlowWithSchema<
864
926
 
865
927
  if (nodeResult.success) {
866
928
  nodeResults.set(nodeId, nodeResult.result);
929
+ if (nodeResult.nodeType) {
930
+ nodeTypes.set(nodeId, nodeResult.nodeType);
931
+ }
867
932
  }
868
933
  }
869
934
  }
@@ -910,6 +975,9 @@ export function createFlowWithSchema<
910
975
  // Node is waiting - pause execution and return state
911
976
  if (nodeResult.result !== undefined) {
912
977
  nodeResults.set(nodeResult.nodeId, nodeResult.result);
978
+ if (nodeResult.nodeType) {
979
+ nodeTypes.set(nodeResult.nodeId, nodeResult.nodeType);
980
+ }
913
981
  }
914
982
 
915
983
  return {
@@ -925,12 +993,16 @@ export function createFlowWithSchema<
925
993
 
926
994
  if (nodeResult.success) {
927
995
  nodeResults.set(nodeResult.nodeId, nodeResult.result);
996
+ if (nodeResult.nodeType) {
997
+ nodeTypes.set(nodeResult.nodeId, nodeResult.nodeType);
998
+ }
928
999
  }
929
1000
  }
930
1001
  }
931
1002
 
932
1003
  // All nodes completed - collect outputs
933
1004
  const finalResult = collectFlowOutputs(nodeResults);
1005
+ const typedOutputs = collectTypedOutputs(nodeResults, nodeTypes);
934
1006
 
935
1007
  const finalResultSchema = z.record(z.string(), outputSchema);
936
1008
 
@@ -961,17 +1033,22 @@ export function createFlowWithSchema<
961
1033
  }
962
1034
  const validatedResult = parseResult.data;
963
1035
 
964
- // Emit FlowEnd event
1036
+ // Emit FlowEnd event with typed outputs
965
1037
  if (onEvent) {
966
1038
  yield* onEvent({
967
1039
  jobId,
968
1040
  eventType: EventType.FlowEnd,
969
1041
  flowId,
970
- result: validatedResult,
1042
+ outputs: typedOutputs,
1043
+ result: validatedResult, // Keep for backward compatibility
971
1044
  });
972
1045
  }
973
1046
 
974
- return { type: "completed" as const, result: validatedResult };
1047
+ return {
1048
+ type: "completed" as const,
1049
+ result: validatedResult,
1050
+ outputs: typedOutputs,
1051
+ };
975
1052
  });
976
1053
  };
977
1054
 
@@ -989,6 +1066,7 @@ export function createFlowWithSchema<
989
1066
  | {
990
1067
  type: "completed";
991
1068
  result: Record<string, z.infer<TFlowOutputSchema>>;
1069
+ outputs?: TypedOutput[];
992
1070
  }
993
1071
  | {
994
1072
  type: "paused";
@@ -1025,6 +1103,7 @@ export function createFlowWithSchema<
1025
1103
  | {
1026
1104
  type: "completed";
1027
1105
  result: Record<string, z.infer<TFlowOutputSchema>>;
1106
+ outputs?: TypedOutput[];
1028
1107
  }
1029
1108
  | {
1030
1109
  type: "paused";
package/src/flow/index.ts CHANGED
@@ -4,6 +4,13 @@ export * from "./edge";
4
4
 
5
5
  export * from "./event";
6
6
  export type { Flow, FlowData } from "./flow";
7
+ // Type registry
8
+ export * from "./type-registry";
9
+ // Built-in node types (auto-registers on import)
10
+ import "./node-types";
11
+ export * from "./node-types";
12
+ // Type guards
13
+ export * from "./type-guards";
7
14
  export * from "./flow";
8
15
  // Core flow engine
9
16
  export { createFlowWithSchema } from "./flow";
@@ -0,0 +1,85 @@
1
+ /**
2
+ * Built-in node type registrations for the flow engine.
3
+ *
4
+ * This module automatically registers the standard input and output node types
5
+ * when imported. These types enable type-safe result consumption in clients.
6
+ *
7
+ * @module flow/node-types
8
+ *
9
+ * @remarks
10
+ * This module should be imported by the flow engine initialization to ensure
11
+ * built-in types are registered before any flows are created.
12
+ *
13
+ * @example
14
+ * ```typescript
15
+ * // Types are automatically registered on import
16
+ * import "@uploadista/core/flow/node-types";
17
+ * import { flowTypeRegistry } from "@uploadista/core/flow";
18
+ *
19
+ * // Check registered types
20
+ * const inputTypes = flowTypeRegistry.listByCategory("input");
21
+ * console.log(inputTypes.map(t => t.id)); // ["streaming-input-v1"]
22
+ * ```
23
+ */
24
+
25
+ import { uploadFileSchema } from "../../types/upload-file";
26
+ import { flowTypeRegistry } from "../type-registry";
27
+
28
+ /**
29
+ * Type ID constants for built-in node types.
30
+ *
31
+ * Use these constants when creating nodes with type information to ensure
32
+ * consistency and avoid typos.
33
+ *
34
+ * @example
35
+ * ```typescript
36
+ * import { STREAMING_INPUT_TYPE_ID } from "@uploadista/core/flow/node-types";
37
+ *
38
+ * const inputNode = createFlowNode({
39
+ * // ... other config
40
+ * nodeTypeId: STREAMING_INPUT_TYPE_ID
41
+ * });
42
+ * ```
43
+ */
44
+ export const STREAMING_INPUT_TYPE_ID = "streaming-input-v1";
45
+ export const STORAGE_OUTPUT_TYPE_ID = "storage-output-v1";
46
+
47
+ /**
48
+ * Register streaming input node type.
49
+ *
50
+ * This is the standard input type for flows that accept chunked file uploads.
51
+ * It produces UploadFile objects as output after processing chunks.
52
+ */
53
+ flowTypeRegistry.register({
54
+ id: STREAMING_INPUT_TYPE_ID,
55
+ category: "input",
56
+ schema: uploadFileSchema,
57
+ version: "1.0.0",
58
+ description: "Streaming file input node that accepts chunked uploads",
59
+ });
60
+
61
+ /**
62
+ * Register storage output node type.
63
+ *
64
+ * This is the standard output type for flows that save files to storage backends
65
+ * (S3, Azure, GCS, etc.). It produces UploadFile objects with final storage URLs.
66
+ */
67
+ flowTypeRegistry.register({
68
+ id: STORAGE_OUTPUT_TYPE_ID,
69
+ category: "output",
70
+ schema: uploadFileSchema,
71
+ version: "1.0.0",
72
+ description: "Storage output node that saves files to configured storage backend",
73
+ });
74
+
75
+ /**
76
+ * Future type registrations can be added here.
77
+ *
78
+ * Examples:
79
+ * - description-output-v1: AI-powered image description output
80
+ * - webhook-output-v1: HTTP webhook notification output
81
+ * - metadata-output-v1: File metadata extraction output
82
+ */
83
+
84
+ // Export the registry for convenience
85
+ export { flowTypeRegistry } from "../type-registry";
package/src/flow/node.ts CHANGED
@@ -6,6 +6,7 @@ import type {
6
6
  FlowNodeData,
7
7
  NodeExecutionResult,
8
8
  } from "./types/flow-types";
9
+ import { flowTypeRegistry } from "./type-registry";
9
10
 
10
11
  /**
11
12
  * Defines the type of node in a flow, determining its role in the processing pipeline.
@@ -81,6 +82,7 @@ export type ConditionValue = string | number;
81
82
  * @param config.retry.maxRetries - Maximum number of retry attempts (default: 0)
82
83
  * @param config.retry.retryDelay - Base delay in milliseconds between retries (default: 1000)
83
84
  * @param config.retry.exponentialBackoff - Whether to use exponential backoff for retries (default: true)
85
+ * @param config.nodeTypeId - Optional type ID from the registry (e.g., "storage-output-v1"). If provided, the node type must be registered and its category must match the node type (input/output).
84
86
  *
85
87
  * @returns An Effect that succeeds with the created FlowNode
86
88
  *
@@ -127,6 +129,7 @@ export function createFlowNode<Input, Output, TType extends NodeType = NodeType>
127
129
  multiOutput = false,
128
130
  pausable = false,
129
131
  retry,
132
+ nodeTypeId,
130
133
  }: {
131
134
  id: string;
132
135
  name: string;
@@ -154,8 +157,35 @@ export function createFlowNode<Input, Output, TType extends NodeType = NodeType>
154
157
  retryDelay?: number;
155
158
  exponentialBackoff?: boolean;
156
159
  };
157
- }): Effect.Effect<FlowNode<Input, Output, UploadistaError> & { type: TType }> {
158
- return Effect.succeed({
160
+ nodeTypeId?: string;
161
+ }): Effect.Effect<FlowNode<Input, Output, UploadistaError> & { type: TType }, UploadistaError> {
162
+ return Effect.gen(function* () {
163
+ // Validate type registration if nodeTypeId provided
164
+ if (nodeTypeId) {
165
+ const typeDef = flowTypeRegistry.get(nodeTypeId);
166
+ if (!typeDef) {
167
+ return yield* UploadistaError.fromCode("INVALID_NODE_TYPE", {
168
+ body: `Node type "${nodeTypeId}" is not registered`,
169
+ details: { nodeTypeId, nodeId: id },
170
+ }).toEffect();
171
+ }
172
+
173
+ // Validate category matches for input/output nodes
174
+ if (type === NodeType.input && typeDef.category !== "input") {
175
+ return yield* UploadistaError.fromCode("TYPE_CATEGORY_MISMATCH", {
176
+ body: `Node type "${nodeTypeId}" is registered as "${typeDef.category}" but node "${id}" is type "${type}"`,
177
+ details: { nodeTypeId, nodeId: id, expectedCategory: "input", actualCategory: typeDef.category },
178
+ }).toEffect();
179
+ }
180
+ if (type === NodeType.output && typeDef.category !== "output") {
181
+ return yield* UploadistaError.fromCode("TYPE_CATEGORY_MISMATCH", {
182
+ body: `Node type "${nodeTypeId}" is registered as "${typeDef.category}" but node "${id}" is type "${type}"`,
183
+ details: { nodeTypeId, nodeId: id, expectedCategory: "output", actualCategory: typeDef.category },
184
+ }).toEffect();
185
+ }
186
+ }
187
+
188
+ return {
159
189
  id,
160
190
  name,
161
191
  description,
@@ -199,9 +229,14 @@ export function createFlowNode<Input, Output, TType extends NodeType = NodeType>
199
229
  clientId,
200
230
  });
201
231
 
202
- // If the node returned waiting state, pass it through
232
+ // If the node returned waiting state, add type information and pass through
203
233
  if (result.type === "waiting") {
204
- return result;
234
+ return {
235
+ type: "waiting" as const,
236
+ partialData: result.partialData,
237
+ nodeType: nodeTypeId,
238
+ nodeId: id,
239
+ };
205
240
  }
206
241
 
207
242
  // Validate output data against schema for completed results
@@ -217,13 +252,20 @@ export function createFlowNode<Input, Output, TType extends NodeType = NodeType>
217
252
  },
218
253
  });
219
254
 
220
- return { type: "complete" as const, data: validatedResult };
255
+ // Return with type information
256
+ return {
257
+ type: "complete" as const,
258
+ data: validatedResult,
259
+ nodeType: nodeTypeId,
260
+ nodeId: id,
261
+ };
221
262
  }),
222
263
  condition,
223
264
  multiInput,
224
265
  multiOutput,
225
266
  retry,
226
- } as FlowNode<Input, Output, UploadistaError> & { type: TType });
267
+ } as FlowNode<Input, Output, UploadistaError> & { type: TType };
268
+ });
227
269
  }
228
270
 
229
271
  /**
@@ -6,6 +6,7 @@ import { uploadFileSchema } from "../../types";
6
6
  import { UploadServer } from "../../upload";
7
7
  import { arrayBuffer, fetchFile } from "../../upload/upload-url";
8
8
  import { createFlowNode, NodeType } from "../node";
9
+ import { STREAMING_INPUT_TYPE_ID } from "../node-types";
9
10
  import { completeNodeExecution, waitingNodeExecution } from "../types";
10
11
  import { resolveUploadMetadata } from "../utils/resolve-upload-metadata";
11
12
 
@@ -177,6 +178,7 @@ export function createInputNode(id: string, params?: InputNodeParams) {
177
178
  type: NodeType.input,
178
179
  inputSchema: inputDataSchema,
179
180
  outputSchema: uploadFileSchema,
181
+ nodeTypeId: STREAMING_INPUT_TYPE_ID,
180
182
  run: ({ data, flowId, jobId, clientId }) => {
181
183
  return Effect.gen(function* () {
182
184
  switch (data.operation) {
@@ -4,6 +4,7 @@ import { UploadistaError } from "../../errors";
4
4
  import { type UploadFile, uploadFileSchema } from "../../types";
5
5
  import { UploadServer } from "../../upload";
6
6
  import { createFlowNode, NodeType } from "../node";
7
+ import { STORAGE_OUTPUT_TYPE_ID } from "../node-types";
7
8
  import { completeNodeExecution } from "../types";
8
9
  import { resolveUploadMetadata } from "../utils/resolve-upload-metadata";
9
10
 
@@ -61,6 +62,7 @@ export function createStorageNode(
61
62
  type: NodeType.output,
62
63
  inputSchema: uploadFileSchema,
63
64
  outputSchema: uploadFileSchema,
65
+ nodeTypeId: STORAGE_OUTPUT_TYPE_ID,
64
66
  run: ({ data: file, storageId, flowId, jobId, clientId }) => {
65
67
  return Effect.gen(function* () {
66
68
  const { type, fileName, metadata, metadataJson } =