@uploadista/flow-utility-nodes 0.0.20-beta.6 → 0.0.20-beta.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -11,8 +11,6 @@ Utility nodes enable complex flow logic without custom code:
  - **Multiplex Node**: Split single input across multiple outputs
  - **Zip Node**: Archive multiple files together
 
- Perfect for building sophisticated upload workflows.
-
  ## Installation
 
  ```bash
@@ -24,266 +22,164 @@ pnpm add @uploadista/flow-utility-nodes
  ## Quick Start
 
  ```typescript
- import { conditionalNode, mergeNode, multiplexNode, zipNode } from "@uploadista/flow-utility-nodes";
- import { Effect } from "effect";
-
- // Route based on file properties
- const flow = {
-   nodes: [
-     { id: "input", type: "input" },
-     {
-       id: "router",
-       type: "conditional",
-       params: {
-         field: "mimeType",
-         operator: "contains",
-         value: "image",
-       },
-     },
-     { id: "output", type: "output" },
-   ],
-   edges: [
-     { from: "input", to: "router" },
-     { from: "router", to: "output" },
-   ],
- };
+ import {
+   createConditionalNode,
+   createMergeNode,
+   createMultiplexNode,
+   createZipNode,
+ } from "@uploadista/flow-utility-nodes";
  ```
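The four factories do not share one shape. Per the published bundle and the type declarations later in this diff, `createConditionalNode` returns a flow node directly (its run handler just passes data through; the flow engine evaluates the `condition` to route), while `createMergeNode`, `createMultiplexNode`, and `createZipNode` return `Effect` values that need an `UploadEngine` (plus a `ZipPlugin` for zip) from context before yielding a node. A minimal sketch of the difference:

```typescript
import { Effect } from "effect";
import {
  createConditionalNode,
  createMergeNode,
} from "@uploadista/flow-utility-nodes";

// Synchronous: returns the node itself.
const router = createConditionalNode("router", {
  field: "mimeType",
  operator: "contains",
  value: "image",
});

// Effectful: yields the node once UploadEngine is provided from context.
const mergerEffect = Effect.gen(function* () {
  return yield* createMergeNode("merger", { strategy: "concat", inputCount: 2 });
});
```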
 
- ## Features
-
- - ✅ **Conditional Routing**: Route based on file properties
- - ✅ **Data Merging**: Combine multiple streams
- - ✅ **Multiplexing**: Split to multiple outputs
- - ✅ **Type Safe**: Full TypeScript support
- - ✅ **No Custom Code**: Visual flow building
-
  ## Node Types
 
  ### Conditional Node
 
- Route inputs based on file properties.
+ Routes inputs based on file properties.
 
- **Parameters**:
  ```typescript
- {
-   field: "mimeType" | "size" | "width" | "height" | "extension",
-   operator: "equals" | "notEquals" | "greaterThan" | "lessThan" | "contains" | "startsWith",
-   value: string | number
- }
- ```
+ import { createConditionalNode } from "@uploadista/flow-utility-nodes";
 
- **Example**: Route images to resize, documents to compress
- ```typescript
- {
-   type: "conditional",
-   params: {
-     field: "mimeType",
-     operator: "contains",
-     value: "image",
-   },
- }
+ // Route images > 1MB to compression
+ const sizeRouter = createConditionalNode("size-router", {
+   field: "size",
+   operator: "greaterThan",
+   value: 1024 * 1024, // 1MB
+ });
+
+ // Route by MIME type
+ const mimeRouter = createConditionalNode("mime-router", {
+   field: "mimeType",
+   operator: "contains",
+   value: "image",
+ });
  ```
 
+ #### Parameters
+
+ | Parameter | Type | Required | Description |
+ |-----------|------|----------|-------------|
+ | `field` | `"mimeType" \| "size" \| "width" \| "height" \| "extension"` | Yes | File property to evaluate |
+ | `operator` | `"equals" \| "notEquals" \| "greaterThan" \| "lessThan" \| "contains" \| "startsWith"` | Yes | Comparison operator |
+ | `value` | `string \| number` | Yes | Value to compare against |
+
  ### Merge Node
 
  Combines multiple inputs into a single output.
 
- **Parameters**:
  ```typescript
- {
-   strategy: "concat" | "batch",
-   separator?: string,
-   inputCount: 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10
- }
- ```
+ import { createMergeNode } from "@uploadista/flow-utility-nodes";
 
- **Example**: Batch 5 uploads before processing
- ```typescript
- {
-   type: "merge",
-   params: {
-     strategy: "batch",
-     inputCount: 5,
-   },
- }
+ // Concatenate 3 files into one
+ const mergeNode = createMergeNode("file-merger", {
+   strategy: "concat",
+   inputCount: 3,
+ });
+
+ // Batch 5 uploads before processing
+ const batchNode = createMergeNode("batch-collector", {
+   strategy: "batch",
+   inputCount: 5,
+   separator: "\n",
+ });
  ```
 
+ #### Parameters
+
+ | Parameter | Type | Required | Default | Description |
+ |-----------|------|----------|---------|-------------|
+ | `strategy` | `"concat" \| "batch"` | No | `"batch"` | Merge strategy |
+ | `inputCount` | `number` (2-10) | No | `2` | Number of inputs to wait for |
+ | `separator` | `string` | No | `"\n"` | Separator for concat strategy |
+
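A behavioral note grounded in the published bundle further down this diff: the `concat` strategy reads every input through the upload engine, concatenates the raw bytes in order, and uploads the result as `merged_<n>_files.bin` with type `application/octet-stream`. Any other strategy falls through to a `VALIDATION_ERROR` ("Unknown merge strategy"), so the documented `batch` default does not appear to be handled by this build's run handler. The concatenation step, paraphrased from the bundle:

```typescript
// Paraphrased from the published bundle: concatenate input buffers in order.
function concatBytes(parts: Uint8Array[]): Uint8Array {
  const total = parts.reduce((sum, part) => sum + part.byteLength, 0);
  const merged = new Uint8Array(total);
  let offset = 0;
  for (const part of parts) {
    merged.set(part, offset);
    offset += part.byteLength;
  }
  return merged;
}
```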
  ### Multiplex Node
 
  Splits input across multiple independent paths.
 
- **Parameters**:
  ```typescript
- {
-   outputCount: 2 | 3 | 4 | 5
- }
- ```
+ import { createMultiplexNode } from "@uploadista/flow-utility-nodes";
 
- **Example**: Send to S3 and archive simultaneously
- ```typescript
- {
-   type: "multiplex",
-   params: {
-     outputCount: 2,
-   },
- }
+ // Send to 3 different destinations
+ const multiplexNode = createMultiplexNode("multi-output", {
+   outputCount: 3,
+   strategy: "copy",
+ });
+
+ // Duplicate to 2 storage backends
+ const backupNode = createMultiplexNode("backup-splitter", {
+   outputCount: 2,
+   strategy: "copy",
+ });
  ```
 
- ### Zip Node
+ #### Parameters
 
- Archive multiple files (see `@uploadista/flow-utility-zipjs`).
+ | Parameter | Type | Required | Default | Description |
+ |-----------|------|----------|---------|-------------|
+ | `outputCount` | `number` (1-10) | Yes | - | Number of output copies |
+ | `strategy` | `"copy" \| "split"` | No | `"copy"` | `copy` duplicates the file, `split` divides it |
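Two caveats grounded in the published bundle: the run handler rejects `split` with "Split strategy is not supported with UploadFile pattern", so only `copy` works in this build; and `copy` physically re-reads the file and re-uploads it rather than sharing a reference. The one-shot stream wrapper it uses for the re-upload, paraphrased:

```typescript
// Paraphrased from the published bundle: wrap a byte buffer in a one-shot
// ReadableStream so the upload engine can re-upload the copy.
function bytesToStream(bytes: Uint8Array): ReadableStream<Uint8Array> {
  return new ReadableStream({
    start(controller) {
      controller.enqueue(bytes);
      controller.close();
    },
  });
}
```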
 
- ## Use Cases
+ ### Zip Node
 
- ### Case 1: Smart Routing
+ Archive multiple files into a ZIP.
 
- ```
- Input Conditional
-   ├─ Image → Resize
-   ├─ PDF → Compress
-   └─ Document → Archive
-   → Output
- ```
+ ```typescript
+ import { createZipNode } from "@uploadista/flow-utility-nodes";
 
- ### Case 2: Batch Processing
+ // Archive multiple files with metadata
+ const zipNode = createZipNode("archiver", {
+   zipName: "backup.zip",
+   includeMetadata: true,
+   inputCount: 5,
+ });
 
- ```
- Input 1
- Input 2 ├─ Merge (batch 3) → Process → Output
- Input 3 ┘
+ // Simple archive
+ const simpleZip = createZipNode("simple-archive", {
+   zipName: "files.zip",
+ });
  ```
 
- ### Case 3: Multi-Destination
+ #### Parameters
 
- ```
- Input → Multiplex ├─ Store to S3
-                   ├─ Archive to GCS
-                   └─ Notify Webhook
- ```
+ | Parameter | Type | Required | Default | Description |
+ |-----------|------|----------|---------|-------------|
+ | `zipName` | `string` | No | `"archive.zip"` | Output ZIP filename |
+ | `includeMetadata` | `boolean` | No | `false` | Include file metadata in archive |
+ | `inputCount` | `number` (2-10) | No | `2` | Number of files to archive |
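The zip factory's `Effect` requires both `UploadEngine` and `ZipPlugin` (see the `UploadistaError, UploadEngine | ZipPlugin` signature in the type declarations below). A sketch of satisfying both with the testing layers from `@uploadista/core`, the same pair this package's own tests merge:

```typescript
import { TestUploadEngine, TestZipPlugin } from "@uploadista/core/testing";
import { Effect, Layer } from "effect";
import { createZipNode } from "@uploadista/flow-utility-nodes";

// Provide UploadEngine and ZipPlugin, then run the factory Effect to get the node.
const zipNode = await Effect.runPromise(
  createZipNode("archiver", { zipName: "backup.zip", includeMetadata: true }).pipe(
    Effect.provide(Layer.mergeAll(TestUploadEngine, TestZipPlugin)),
  ),
);
```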
 
- ## API Reference
+ ## Use Cases
 
- All nodes exported from main entry point.
+ ### Case 1: Smart Routing
 
- ```typescript
- import {
-   conditionalNode,
-   mergeNode,
-   multiplexNode,
-   zipNode,
- } from "@uploadista/flow-utility-nodes";
  ```
-
- ## Examples
-
- ### Example 1: Image/Document Routing
-
- ```typescript
- const flow = {
-   nodes: [
-     { id: "input", type: "input" },
-     {
-       id: "router",
-       type: "conditional",
-       params: {
-         field: "mimeType",
-         operator: "contains",
-         value: "image",
-       },
-     },
-     { id: "resize", type: "resize", params: { width: 800 } },
-     { id: "s3", type: "s3", params: { bucket: "images" } },
-     { id: "pdf-store", type: "s3", params: { bucket: "documents" } },
-     { id: "output", type: "output" },
-   ],
-   edges: [
-     { from: "input", to: "router" },
-     { from: "router", true: "resize", false: "pdf-store" },
-     { from: "resize", to: "s3" },
-     { from: "s3", to: "output" },
-     { from: "pdf-store", to: "output" },
-   ],
- };
+ Input -> Conditional
+   |-- Image -> Resize
+   |-- PDF -> Compress
+   +-- Document -> Archive
+ -> Output
  ```
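The removed Example 1 above expressed this case as a flow definition, wiring the router's branches with `true`/`false` edge keys. A condensed sketch in that shape, hedged: the edge schema comes from the previous README and may have changed, and the `compress` node type here is hypothetical.

```typescript
// Condensed from the removed Example 1; the true/false branch-edge shape
// comes from the previous README and may have changed in the current engine.
const smartRouting = {
  nodes: [
    { id: "input", type: "input" },
    {
      id: "router",
      type: "conditional",
      params: { field: "mimeType", operator: "contains", value: "image" },
    },
    { id: "resize", type: "resize", params: { width: 800 } },
    { id: "compress", type: "compress", params: {} }, // hypothetical node type
    { id: "output", type: "output" },
  ],
  edges: [
    { from: "input", to: "router" },
    { from: "router", true: "resize", false: "compress" },
    { from: "resize", to: "output" },
    { from: "compress", to: "output" },
  ],
};
```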
 
- ### Example 2: Batch Processing
+ ### Case 2: Batch Processing
 
- ```typescript
- const batchFlow = {
-   nodes: [
-     { id: "input1", type: "input" },
-     { id: "input2", type: "input" },
-     { id: "input3", type: "input" },
-     {
-       id: "merge",
-       type: "merge",
-       params: { strategy: "batch", inputCount: 3 },
-     },
-     { id: "process", type: "custom", params: {} },
-     { id: "output", type: "output" },
-   ],
-   edges: [
-     { from: "input1", to: "merge" },
-     { from: "input2", to: "merge" },
-     { from: "input3", to: "merge" },
-     { from: "merge", to: "process" },
-     { from: "process", to: "output" },
-   ],
- };
  ```
-
- ### Example 3: Multi-Path Distribution
-
- ```typescript
- const multiPath = {
-   nodes: [
-     { id: "input", type: "input" },
-     { id: "split", type: "multiplex", params: { outputCount: 3 } },
-     { id: "s3", type: "s3", params: { bucket: "primary" } },
-     { id: "gcs", type: "gcs", params: { bucket: "backup" } },
-     { id: "archive", type: "zip", params: {} },
-     { id: "output", type: "output" },
-   ],
-   edges: [
-     { from: "input", to: "split" },
-     { from: "split", index: 0, to: "s3" },
-     { from: "split", index: 1, to: "gcs" },
-     { from: "split", index: 2, to: "archive" },
-     { from: "s3", to: "output" },
-     { from: "gcs", to: "output" },
-     { from: "archive", to: "output" },
-   ],
- };
+ Input 1 -+
+ Input 2 -+-- Merge (batch 3) -> Process -> Output
+ Input 3 -+
  ```
 
- ## Configuration
-
- Nodes configured via `params` object in flow definition:
+ ### Case 3: Multi-Destination
 
- ```typescript
- {
-   id: "node-id",
-   type: "conditional",
-   params: {
-     field: "mimeType",
-     operator: "contains",
-     value: "image",
-   },
- }
+ ```
+ Input -> Multiplex -+-- Store to S3
+                     +-- Archive to GCS
+                     +-- Notify Webhook
  ```
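The removed Example 3 above wired this fan-out with indexed edges from the multiplex node. Condensed, with the same hedge that the edge shape comes from the previous README:

```typescript
// Condensed from the removed Example 3; `index` selects a multiplex output,
// per the previous README's edge shape.
const multiDestination = {
  nodes: [
    { id: "input", type: "input" },
    { id: "split", type: "multiplex", params: { outputCount: 3 } },
    { id: "s3", type: "s3", params: { bucket: "primary" } },
    { id: "gcs", type: "gcs", params: { bucket: "backup" } },
    { id: "archive", type: "zip", params: {} },
  ],
  edges: [
    { from: "input", to: "split" },
    { from: "split", index: 0, to: "s3" },
    { from: "split", index: 1, to: "gcs" },
    { from: "split", index: 2, to: "archive" },
  ],
};
```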
 
  ## Related Packages
 
  - [@uploadista/core](../../core) - Core flow types
- - [@uploadista/flow-utility-zipjs](../zipjs) - Archive node
- - [@uploadista/flow-images-nodes](../images/nodes) - Image utilities
+ - [@uploadista/flow-image-nodes](../images/nodes) - Image utilities
  - [@uploadista/server](../../servers/server) - Upload server
 
  ## License
 
  See [LICENSE](../../../LICENSE) in the main repository.
-
- ## See Also
-
- - [FLOW_NODES.md](../FLOW_NODES.md) - Complete node gallery
- - [Server Setup Guide](../../../SERVER_SETUP.md) - Flow integration
@@ -1 +1 @@
- let e=require(`@uploadista/core/flow`),t=require(`@uploadista/core/types`),n=require(`effect`),r=require(`@uploadista/core/errors`),i=require(`@uploadista/core/upload`),a=require(`zod`);function o(r,{field:i,operator:a,value:o}){return(0,e.createFlowNode)({id:r,name:`Conditional Router`,description:`Routes flow based on ${i} ${a} ${o}`,type:e.NodeType.conditional,nodeTypeId:`conditional`,inputSchema:t.uploadFileSchema,outputSchema:t.uploadFileSchema,condition:{field:i,operator:a,value:o},run:({data:t})=>n.Effect.succeed((0,e.completeNodeExecution)(t))})}const s=a.z.record(a.z.string(),t.uploadFileSchema),c=t.uploadFileSchema;function l(t,{strategy:a,separator:o}){return n.Effect.gen(function*(){let o=yield*i.UploadServer;return yield*(0,e.createFlowNode)({id:t,name:`Merge Files`,description:`Merges multiple files using ${a} strategy`,type:e.NodeType.merge,nodeTypeId:`merge`,outputTypeId:e.STORAGE_OUTPUT_TYPE_ID,inputSchema:s,outputSchema:c,multiInput:!0,run:({data:t,storageId:i,clientId:s})=>n.Effect.gen(function*(){if(!t||Object.keys(t).length===0)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to merge node`}));let c=Object.values(t);if(c.length===0)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`No files to merge`}));switch(a){case`concat`:{let t=[],n=0;for(let e of c){let r=yield*o.read(e.id,s);t.push(r),n+=r.byteLength}let r=new Uint8Array(n),a=0;for(let e of t)r.set(e,a),a+=e.byteLength;let l=new ReadableStream({start(e){e.enqueue(r),e.close()}});return(0,e.completeNodeExecution)(yield*o.upload({storageId:i,size:r.byteLength,type:`application/octet-stream`,fileName:`merged_${c.length}_files.bin`,lastModified:0,metadata:JSON.stringify({mimeType:`application/octet-stream`,originalName:`merged_${c.length}_files`,extension:`bin`})},s,l))}default:return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`Unknown merge strategy: ${a}`}))}})})})}function u(a,{outputCount:o,strategy:s}){return n.Effect.gen(function*(){let o=yield*i.UploadServer;return yield*(0,e.createFlowNode)({id:a,name:`Multiplex`,description:`Multiplexes input using ${s} strategy`,type:e.NodeType.multiplex,nodeTypeId:`multiplex`,outputTypeId:e.STORAGE_OUTPUT_TYPE_ID,inputSchema:t.uploadFileSchema,outputSchema:t.uploadFileSchema,multiOutput:!0,run:({data:t,storageId:i,clientId:a})=>n.Effect.gen(function*(){let{type:c,fileName:l,metadata:u,metadataJson:d}=(0,e.resolveUploadMetadata)(t.metadata),f=u?{...t,metadata:u}:t;if(s===`copy`){let t=yield*o.read(f.id,a),n=new ReadableStream({start(e){e.enqueue(t),e.close()}}),r=yield*o.upload({storageId:i,size:t.byteLength,type:c,fileName:l,lastModified:0,metadata:d},a,n),s=(0,e.resolveUploadMetadata)(r.metadata);return(0,e.completeNodeExecution)(s.metadata?{...r,metadata:s.metadata}:r)}else if(s===`split`)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`Split strategy is not supported with UploadFile pattern`}));return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`Unknown multiplex strategy: ${s}`}))})})})}const d=a.z.record(a.z.string(),t.uploadFileSchema),f=t.uploadFileSchema;function p(t,{zipName:a,includeMetadata:o}){return n.Effect.gen(function*(){let s=yield*i.UploadServer,c=yield*e.ZipPlugin;return yield*(0,e.createFlowNode)({id:t,name:`Zip Files`,description:`Combines multiple files into a zip 
archive`,type:e.NodeType.process,nodeTypeId:`zip`,outputTypeId:e.STORAGE_OUTPUT_TYPE_ID,inputSchema:d,outputSchema:f,multiInput:!0,run:({data:t,storageId:i,clientId:l})=>n.Effect.gen(function*(){if(!t||Object.keys(t).length===0)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to zip node`}));let u=yield*n.Effect.forEach(Object.values(t),e=>n.Effect.gen(function*(){let t=yield*s.read(e.id,l);return{id:e.id,data:t,metadata:e.metadata}}),{concurrency:`unbounded`}),d=yield*c.zip(u,{zipName:a,includeMetadata:o}),f=new ReadableStream({start(e){e.enqueue(d),e.close()}});return(0,e.completeNodeExecution)(yield*s.upload({storageId:i,size:d.byteLength,type:`application/zip`,fileName:a,lastModified:0,metadata:JSON.stringify({mimeType:`application/zip`,type:`application/zip`,originalName:a,fileName:a,extension:`zip`})},l,f))})})})}exports.createConditionalNode=o,exports.createMergeNode=l,exports.createMultiplexNode=u,exports.createZipNode=p;
+ let e=require(`@uploadista/core/flow`),t=require(`@uploadista/core/types`),n=require(`effect`),r=require(`@uploadista/core/errors`),i=require(`@uploadista/core/upload`),a=require(`zod`);function o(r,{field:i,operator:a,value:o}){return(0,e.createFlowNode)({id:r,name:`Conditional Router`,description:`Routes flow based on ${i} ${a} ${o}`,type:e.NodeType.conditional,nodeTypeId:`conditional`,inputSchema:t.uploadFileSchema,outputSchema:t.uploadFileSchema,condition:{field:i,operator:a,value:o},run:({data:t})=>n.Effect.succeed((0,e.completeNodeExecution)(t))})}const s=a.z.record(a.z.string(),t.uploadFileSchema),c=t.uploadFileSchema;function l(t,{strategy:a,separator:o}){return n.Effect.gen(function*(){let o=yield*i.UploadEngine;return yield*(0,e.createFlowNode)({id:t,name:`Merge Files`,description:`Merges multiple files using ${a} strategy`,type:e.NodeType.merge,nodeTypeId:`merge`,outputTypeId:e.STORAGE_OUTPUT_TYPE_ID,inputSchema:s,outputSchema:c,multiInput:!0,run:({data:t,storageId:i,clientId:s})=>n.Effect.gen(function*(){if(!t||Object.keys(t).length===0)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to merge node`}));let c=Object.values(t);if(c.length===0)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`No files to merge`}));switch(a){case`concat`:{let t=[],n=0;for(let e of c){let r=yield*o.read(e.id,s);t.push(r),n+=r.byteLength}let r=new Uint8Array(n),a=0;for(let e of t)r.set(e,a),a+=e.byteLength;let l=new ReadableStream({start(e){e.enqueue(r),e.close()}});return(0,e.completeNodeExecution)(yield*o.upload({storageId:i,size:r.byteLength,type:`application/octet-stream`,fileName:`merged_${c.length}_files.bin`,lastModified:0,metadata:JSON.stringify({mimeType:`application/octet-stream`,originalName:`merged_${c.length}_files`,extension:`bin`})},s,l))}default:return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`Unknown merge strategy: ${a}`}))}})})})}function u(a,{outputCount:o,strategy:s}){return n.Effect.gen(function*(){let o=yield*i.UploadEngine;return yield*(0,e.createFlowNode)({id:a,name:`Multiplex`,description:`Multiplexes input using ${s} strategy`,type:e.NodeType.multiplex,nodeTypeId:`multiplex`,outputTypeId:e.STORAGE_OUTPUT_TYPE_ID,inputSchema:t.uploadFileSchema,outputSchema:t.uploadFileSchema,multiOutput:!0,run:({data:t,storageId:i,clientId:a})=>n.Effect.gen(function*(){let{type:c,fileName:l,metadata:u,metadataJson:d}=(0,e.resolveUploadMetadata)(t.metadata),f=u?{...t,metadata:u}:t;if(s===`copy`){let t=yield*o.read(f.id,a),n=new ReadableStream({start(e){e.enqueue(t),e.close()}}),r=yield*o.upload({storageId:i,size:t.byteLength,type:c,fileName:l,lastModified:0,metadata:d},a,n),s=(0,e.resolveUploadMetadata)(r.metadata);return(0,e.completeNodeExecution)(s.metadata?{...r,metadata:s.metadata}:r)}else if(s===`split`)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`Split strategy is not supported with UploadFile pattern`}));return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`Unknown multiplex strategy: ${s}`}))})})})}const d=a.z.record(a.z.string(),t.uploadFileSchema),f=t.uploadFileSchema;function p(t,{zipName:a,includeMetadata:o}){return n.Effect.gen(function*(){let s=yield*i.UploadEngine,c=yield*e.ZipPlugin;return yield*(0,e.createFlowNode)({id:t,name:`Zip Files`,description:`Combines multiple files into a zip 
archive`,type:e.NodeType.process,nodeTypeId:`zip`,outputTypeId:e.STORAGE_OUTPUT_TYPE_ID,inputSchema:d,outputSchema:f,multiInput:!0,run:({data:t,storageId:i,clientId:l})=>n.Effect.gen(function*(){if(!t||Object.keys(t).length===0)return yield*n.Effect.fail(r.UploadistaError.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to zip node`}));let u=yield*n.Effect.forEach(Object.values(t),e=>n.Effect.gen(function*(){let t=yield*s.read(e.id,l);return{id:e.id,data:t,metadata:e.metadata}}),{concurrency:`unbounded`}),d=yield*c.zip(u,{zipName:a,includeMetadata:o}),f=new ReadableStream({start(e){e.enqueue(d),e.close()}});return(0,e.completeNodeExecution)(yield*s.upload({storageId:i,size:d.byteLength,type:`application/zip`,fileName:a,lastModified:0,metadata:JSON.stringify({mimeType:`application/zip`,type:`application/zip`,originalName:a,fileName:a,extension:`zip`})},l,f))})})})}exports.createConditionalNode=o,exports.createMergeNode=l,exports.createMultiplexNode=u,exports.createZipNode=p;
@@ -8,7 +8,7 @@ import * as _uploadista_core_errors0 from "@uploadista/core/errors";
  import { UploadistaError } from "@uploadista/core/errors";
  import { UploadFile } from "@uploadista/core/types";
  import { Effect } from "effect";
- import { UploadServer } from "@uploadista/core/upload";
+ import { UploadEngine } from "@uploadista/core/upload";
 
  //#region src/nodes/conditional-node.d.ts
  declare function createConditionalNode(id: string, {
@@ -75,7 +75,7 @@ declare function createMergeNode(id: string, {
    circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
  } & {
    type: NodeType;
- }, UploadistaError, UploadServer>;
+ }, UploadistaError, UploadEngine>;
  //#endregion
  //#region src/nodes/multiplex-node.d.ts
  declare function createMultiplexNode(id: string, {
@@ -108,7 +108,7 @@ declare function createMultiplexNode(id: string, {
    circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
  } & {
    type: NodeType;
- }, UploadistaError, UploadServer>;
+ }, UploadistaError, UploadEngine>;
  //#endregion
  //#region src/nodes/zip-node.d.ts
  declare function createZipNode(id: string, {
@@ -141,7 +141,7 @@ declare function createZipNode(id: string, {
    circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
  } & {
    type: NodeType;
- }, UploadistaError, UploadServer | ZipPlugin>;
+ }, UploadistaError, UploadEngine | ZipPlugin>;
  //#endregion
  export { createConditionalNode, createMergeNode, createMultiplexNode, createZipNode };
  //# sourceMappingURL=index.d.cts.map
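Across the bundles, both declaration files, and the source hunks below, the only substantive API change in this release is this service rename; the rest is dependency and version bumps. For consumers who import the service directly, the migration mirrors these hunks:

```typescript
// 0.0.20-beta.6
import { UploadServer } from "@uploadista/core/upload";

// 0.0.20-beta.8
import { UploadEngine } from "@uploadista/core/upload";
```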
@@ -5,7 +5,7 @@ import { UploadFile } from "@uploadista/core/types";
  import { Effect } from "effect";
  import * as _uploadista_core_errors0 from "@uploadista/core/errors";
  import { UploadistaError } from "@uploadista/core/errors";
- import { UploadServer } from "@uploadista/core/upload";
+ import { UploadEngine } from "@uploadista/core/upload";
  import * as zod0 from "zod";
  import { z } from "zod";
  import * as zod_v4_core0 from "zod/v4/core";
@@ -75,7 +75,7 @@ declare function createMergeNode(id: string, {
    circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
  } & {
    type: NodeType;
- }, UploadistaError, UploadServer>;
+ }, UploadistaError, UploadEngine>;
  //#endregion
  //#region src/nodes/multiplex-node.d.ts
  declare function createMultiplexNode(id: string, {
@@ -108,7 +108,7 @@ declare function createMultiplexNode(id: string, {
    circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
  } & {
    type: NodeType;
- }, UploadistaError, UploadServer>;
+ }, UploadistaError, UploadEngine>;
  //#endregion
  //#region src/nodes/zip-node.d.ts
  declare function createZipNode(id: string, {
@@ -141,7 +141,7 @@ declare function createZipNode(id: string, {
    circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
  } & {
    type: NodeType;
- }, UploadistaError, UploadServer | ZipPlugin>;
+ }, UploadistaError, UploadEngine | ZipPlugin>;
  //#endregion
  export { createConditionalNode, createMergeNode, createMultiplexNode, createZipNode };
  //# sourceMappingURL=index.d.mts.map
@@ -1,2 +1,2 @@
- import{NodeType as e,STORAGE_OUTPUT_TYPE_ID as t,ZipPlugin as n,completeNodeExecution as r,createFlowNode as i,resolveUploadMetadata as a}from"@uploadista/core/flow";import{uploadFileSchema as o}from"@uploadista/core/types";import{Effect as s}from"effect";import{UploadistaError as c}from"@uploadista/core/errors";import{UploadServer as l}from"@uploadista/core/upload";import{z as u}from"zod";function d(t,{field:n,operator:a,value:c}){return i({id:t,name:`Conditional Router`,description:`Routes flow based on ${n} ${a} ${c}`,type:e.conditional,nodeTypeId:`conditional`,inputSchema:o,outputSchema:o,condition:{field:n,operator:a,value:c},run:({data:e})=>s.succeed(r(e))})}const f=u.record(u.string(),o),p=o;function m(n,{strategy:a,separator:o}){return s.gen(function*(){let o=yield*l;return yield*i({id:n,name:`Merge Files`,description:`Merges multiple files using ${a} strategy`,type:e.merge,nodeTypeId:`merge`,outputTypeId:t,inputSchema:f,outputSchema:p,multiInput:!0,run:({data:e,storageId:t,clientId:n})=>s.gen(function*(){if(!e||Object.keys(e).length===0)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to merge node`}));let i=Object.values(e);if(i.length===0)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`No files to merge`}));switch(a){case`concat`:{let e=[],a=0;for(let t of i){let r=yield*o.read(t.id,n);e.push(r),a+=r.byteLength}let s=new Uint8Array(a),c=0;for(let t of e)s.set(t,c),c+=t.byteLength;let l=new ReadableStream({start(e){e.enqueue(s),e.close()}});return r(yield*o.upload({storageId:t,size:s.byteLength,type:`application/octet-stream`,fileName:`merged_${i.length}_files.bin`,lastModified:0,metadata:JSON.stringify({mimeType:`application/octet-stream`,originalName:`merged_${i.length}_files`,extension:`bin`})},n,l))}default:return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`Unknown merge strategy: ${a}`}))}})})})}function h(n,{outputCount:u,strategy:d}){return s.gen(function*(){let u=yield*l;return yield*i({id:n,name:`Multiplex`,description:`Multiplexes input using ${d} strategy`,type:e.multiplex,nodeTypeId:`multiplex`,outputTypeId:t,inputSchema:o,outputSchema:o,multiOutput:!0,run:({data:e,storageId:t,clientId:n})=>s.gen(function*(){let{type:i,fileName:o,metadata:l,metadataJson:f}=a(e.metadata),p=l?{...e,metadata:l}:e;if(d===`copy`){let e=yield*u.read(p.id,n),s=new ReadableStream({start(t){t.enqueue(e),t.close()}}),c=yield*u.upload({storageId:t,size:e.byteLength,type:i,fileName:o,lastModified:0,metadata:f},n,s),l=a(c.metadata);return r(l.metadata?{...c,metadata:l.metadata}:c)}else if(d===`split`)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`Split strategy is not supported with UploadFile pattern`}));return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`Unknown multiplex strategy: ${d}`}))})})})}const g=u.record(u.string(),o),_=o;function v(a,{zipName:o,includeMetadata:u}){return s.gen(function*(){let d=yield*l,f=yield*n;return yield*i({id:a,name:`Zip Files`,description:`Combines multiple files into a zip archive`,type:e.process,nodeTypeId:`zip`,outputTypeId:t,inputSchema:g,outputSchema:_,multiInput:!0,run:({data:e,storageId:t,clientId:n})=>s.gen(function*(){if(!e||Object.keys(e).length===0)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to zip node`}));let i=yield*s.forEach(Object.values(e),e=>s.gen(function*(){let t=yield*d.read(e.id,n);return{id:e.id,data:t,metadata:e.metadata}}),{concurrency:`unbounded`}),a=yield*f.zip(i,{zipName:o,includeMetadata:u}),l=new 
ReadableStream({start(e){e.enqueue(a),e.close()}});return r(yield*d.upload({storageId:t,size:a.byteLength,type:`application/zip`,fileName:o,lastModified:0,metadata:JSON.stringify({mimeType:`application/zip`,type:`application/zip`,originalName:o,fileName:o,extension:`zip`})},n,l))})})})}export{d as createConditionalNode,m as createMergeNode,h as createMultiplexNode,v as createZipNode};
+ import{NodeType as e,STORAGE_OUTPUT_TYPE_ID as t,ZipPlugin as n,completeNodeExecution as r,createFlowNode as i,resolveUploadMetadata as a}from"@uploadista/core/flow";import{uploadFileSchema as o}from"@uploadista/core/types";import{Effect as s}from"effect";import{UploadistaError as c}from"@uploadista/core/errors";import{UploadEngine as l}from"@uploadista/core/upload";import{z as u}from"zod";function d(t,{field:n,operator:a,value:c}){return i({id:t,name:`Conditional Router`,description:`Routes flow based on ${n} ${a} ${c}`,type:e.conditional,nodeTypeId:`conditional`,inputSchema:o,outputSchema:o,condition:{field:n,operator:a,value:c},run:({data:e})=>s.succeed(r(e))})}const f=u.record(u.string(),o),p=o;function m(n,{strategy:a,separator:o}){return s.gen(function*(){let o=yield*l;return yield*i({id:n,name:`Merge Files`,description:`Merges multiple files using ${a} strategy`,type:e.merge,nodeTypeId:`merge`,outputTypeId:t,inputSchema:f,outputSchema:p,multiInput:!0,run:({data:e,storageId:t,clientId:n})=>s.gen(function*(){if(!e||Object.keys(e).length===0)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to merge node`}));let i=Object.values(e);if(i.length===0)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`No files to merge`}));switch(a){case`concat`:{let e=[],a=0;for(let t of i){let r=yield*o.read(t.id,n);e.push(r),a+=r.byteLength}let s=new Uint8Array(a),c=0;for(let t of e)s.set(t,c),c+=t.byteLength;let l=new ReadableStream({start(e){e.enqueue(s),e.close()}});return r(yield*o.upload({storageId:t,size:s.byteLength,type:`application/octet-stream`,fileName:`merged_${i.length}_files.bin`,lastModified:0,metadata:JSON.stringify({mimeType:`application/octet-stream`,originalName:`merged_${i.length}_files`,extension:`bin`})},n,l))}default:return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`Unknown merge strategy: ${a}`}))}})})})}function h(n,{outputCount:u,strategy:d}){return s.gen(function*(){let u=yield*l;return yield*i({id:n,name:`Multiplex`,description:`Multiplexes input using ${d} strategy`,type:e.multiplex,nodeTypeId:`multiplex`,outputTypeId:t,inputSchema:o,outputSchema:o,multiOutput:!0,run:({data:e,storageId:t,clientId:n})=>s.gen(function*(){let{type:i,fileName:o,metadata:l,metadataJson:f}=a(e.metadata),p=l?{...e,metadata:l}:e;if(d===`copy`){let e=yield*u.read(p.id,n),s=new ReadableStream({start(t){t.enqueue(e),t.close()}}),c=yield*u.upload({storageId:t,size:e.byteLength,type:i,fileName:o,lastModified:0,metadata:f},n,s),l=a(c.metadata);return r(l.metadata?{...c,metadata:l.metadata}:c)}else if(d===`split`)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`Split strategy is not supported with UploadFile pattern`}));return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`Unknown multiplex strategy: ${d}`}))})})})}const g=u.record(u.string(),o),_=o;function v(a,{zipName:o,includeMetadata:u}){return s.gen(function*(){let d=yield*l,f=yield*n;return yield*i({id:a,name:`Zip Files`,description:`Combines multiple files into a zip archive`,type:e.process,nodeTypeId:`zip`,outputTypeId:t,inputSchema:g,outputSchema:_,multiInput:!0,run:({data:e,storageId:t,clientId:n})=>s.gen(function*(){if(!e||Object.keys(e).length===0)return yield*s.fail(c.fromCode(`VALIDATION_ERROR`,{body:`No inputs provided to zip node`}));let i=yield*s.forEach(Object.values(e),e=>s.gen(function*(){let t=yield*d.read(e.id,n);return{id:e.id,data:t,metadata:e.metadata}}),{concurrency:`unbounded`}),a=yield*f.zip(i,{zipName:o,includeMetadata:u}),l=new 
ReadableStream({start(e){e.enqueue(a),e.close()}});return r(yield*d.upload({storageId:t,size:a.byteLength,type:`application/zip`,fileName:o,lastModified:0,metadata:JSON.stringify({mimeType:`application/zip`,type:`application/zip`,originalName:o,fileName:o,extension:`zip`})},n,l))})})})}export{d as createConditionalNode,m as createMergeNode,h as createMultiplexNode,v as createZipNode};
  //# sourceMappingURL=index.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"index.mjs","names":["inputSchema","outputSchema","inputBytesArray: Uint8Array[]"],"sources":["../../src/nodes/conditional-node.ts","../../src/nodes/merge-node.ts","../../src/nodes/multiplex-node.ts","../../src/nodes/zip-node.ts"],"sourcesContent":["import {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { Effect } from \"effect\";\nimport type { ConditionalParams } from \"@/types/conditional-node\";\n\nexport function createConditionalNode(\n id: string,\n { field, operator, value }: ConditionalParams,\n) {\n return createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Conditional Router\",\n description: `Routes flow based on ${field} ${operator} ${value}`,\n type: NodeType.conditional,\n nodeTypeId: \"conditional\",\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n condition: { field, operator, value },\n run: ({ data }) => {\n // The actual routing logic is handled by the flow engine\n // This node just passes through the data\n return Effect.succeed(completeNodeExecution(data));\n },\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadServer } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { MergeParams } from \"@/types/merge-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createMergeNode(\n id: string,\n { strategy, separator: _separator }: MergeParams,\n) {\n return Effect.gen(function* () {\n const uploadServer = yield* UploadServer;\n\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Merge Files\",\n description: `Merges multiple files using ${strategy} strategy`,\n type: NodeType.merge,\n nodeTypeId: \"merge\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to merge node\",\n }),\n );\n }\n\n const inputFiles = Object.values(inputs);\n\n if (inputFiles.length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No files to merge\",\n }),\n );\n }\n\n switch (strategy) {\n case \"concat\": {\n // Read bytes from all input files\n const inputBytesArray: Uint8Array[] = [];\n let totalSize = 0;\n\n for (const file of inputFiles) {\n const bytes = yield* uploadServer.read(file.id, clientId);\n inputBytesArray.push(bytes);\n totalSize += bytes.byteLength;\n }\n\n // Concatenate all files into one\n const mergedBytes = new Uint8Array(totalSize);\n let offset = 0;\n for (const bytes of inputBytesArray) {\n mergedBytes.set(bytes, offset);\n offset += bytes.byteLength;\n }\n\n // Create a stream from the merged bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(mergedBytes);\n controller.close();\n },\n });\n\n // Upload the merged file\n const result = yield* uploadServer.upload(\n {\n storageId,\n size: mergedBytes.byteLength,\n 
type: \"application/octet-stream\",\n fileName: `merged_${inputFiles.length}_files.bin`,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/octet-stream\",\n originalName: `merged_${inputFiles.length}_files`,\n extension: \"bin\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n }\n default: {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown merge strategy: ${strategy}`,\n }),\n );\n }\n }\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n resolveUploadMetadata,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadServer } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport type { MultiplexParams } from \"@/types/multiplex-node\";\n\nexport function createMultiplexNode(\n id: string,\n { outputCount: _outputCount, strategy }: MultiplexParams,\n) {\n return Effect.gen(function* () {\n const uploadServer = yield* UploadServer;\n\n return yield* createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Multiplex\",\n description: `Multiplexes input using ${strategy} strategy`,\n type: NodeType.multiplex,\n nodeTypeId: \"multiplex\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n multiOutput: true,\n run: ({ data: file, storageId, clientId }) => {\n return Effect.gen(function* () {\n const { type, fileName, metadata, metadataJson } =\n resolveUploadMetadata(file.metadata);\n const normalizedFile = metadata ? { ...file, metadata } : file;\n\n if (strategy === \"copy\") {\n // For copy strategy, read and re-upload the file\n const inputBytes = yield* uploadServer.read(\n normalizedFile.id,\n clientId,\n );\n\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(inputBytes);\n controller.close();\n },\n });\n\n const result = yield* uploadServer.upload(\n {\n storageId,\n size: inputBytes.byteLength,\n type,\n fileName,\n lastModified: 0,\n metadata: metadataJson,\n },\n clientId,\n stream,\n );\n\n const resolvedResult = resolveUploadMetadata(result.metadata);\n\n return completeNodeExecution(\n resolvedResult.metadata\n ? 
{ ...result, metadata: resolvedResult.metadata }\n : result,\n );\n } else if (strategy === \"split\") {\n // Split strategy is not supported in the new pattern\n // as it would require returning multiple UploadFiles\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"Split strategy is not supported with UploadFile pattern\",\n }),\n );\n }\n\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown multiplex strategy: ${strategy}`,\n }),\n );\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n ZipPlugin,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadServer } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { ZipParams } from \"@/types/zip-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createZipNode(\n id: string,\n { zipName, includeMetadata }: ZipParams,\n) {\n return Effect.gen(function* () {\n const uploadServer = yield* UploadServer;\n const zipPlugin = yield* ZipPlugin;\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Zip Files\",\n description: \"Combines multiple files into a zip archive\",\n type: NodeType.process,\n nodeTypeId: \"zip\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to zip node\",\n }),\n );\n }\n\n const zipInputs = yield* Effect.forEach(\n Object.values(inputs),\n (input) =>\n Effect.gen(function* () {\n const data = yield* uploadServer.read(input.id, clientId);\n return {\n id: input.id,\n data,\n metadata: input.metadata,\n };\n }),\n { concurrency: \"unbounded\" },\n );\n\n const zipBytes = yield* zipPlugin.zip(zipInputs, {\n zipName,\n includeMetadata,\n });\n\n // Create a stream from the zip bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(zipBytes);\n controller.close();\n },\n });\n\n // Upload the zip file\n const result = yield* uploadServer.upload(\n {\n storageId,\n size: zipBytes.byteLength,\n type: \"application/zip\",\n fileName: zipName,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/zip\",\n type: \"application/zip\",\n originalName: zipName,\n fileName: zipName,\n extension: \"zip\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n });\n },\n });\n 
});\n}\n"],"mappings":"yYASA,SAAgB,EACd,EACA,CAAE,QAAO,WAAU,SACnB,CACA,OAAO,EAAuC,CAC5C,KACA,KAAM,qBACN,YAAa,wBAAwB,EAAM,GAAG,EAAS,GAAG,IAC1D,KAAM,EAAS,YACf,WAAY,cACZ,YAAa,EACb,aAAc,EACd,UAAW,CAAE,QAAO,WAAU,QAAO,CACrC,KAAM,CAAE,UAGC,EAAO,QAAQ,EAAsB,EAAK,CAAC,CAErD,CAAC,CCdJ,MAAMA,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpDC,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,WAAU,UAAW,GACvB,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,cACN,YAAa,+BAA+B,EAAS,WACrD,KAAM,EAAS,MACf,WAAY,QACZ,aAAc,EACd,YAAA,EACA,aAAA,EACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,mCACP,CAAC,CACH,CAGH,IAAM,EAAa,OAAO,OAAO,EAAO,CAExC,GAAI,EAAW,SAAW,EACxB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,oBACP,CAAC,CACH,CAGH,OAAQ,EAAR,CACE,IAAK,SAAU,CAEb,IAAMC,EAAgC,EAAE,CACpC,EAAY,EAEhB,IAAK,IAAM,KAAQ,EAAY,CAC7B,IAAM,EAAQ,MAAO,EAAa,KAAK,EAAK,GAAI,EAAS,CACzD,EAAgB,KAAK,EAAM,CAC3B,GAAa,EAAM,WAIrB,IAAM,EAAc,IAAI,WAAW,EAAU,CACzC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAY,IAAI,EAAO,EAAO,CAC9B,GAAU,EAAM,WAIlB,IAAM,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAoBF,OAAO,EAjBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAY,WAClB,KAAM,2BACN,SAAU,UAAU,EAAW,OAAO,YACtC,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,2BACV,aAAc,UAAU,EAAW,OAAO,QAC1C,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,CAEtC,QACE,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,2BAA2B,IAClC,CAAC,CACH,GAGL,CAEL,CAAC,EACF,CCnGJ,SAAgB,EACd,EACA,CAAE,YAAa,EAAc,YAC7B,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuC,CACnD,KACA,KAAM,YACN,YAAa,2BAA2B,EAAS,WACjD,KAAM,EAAS,UACf,WAAY,YACZ,aAAc,EACd,YAAa,EACb,aAAc,EACd,YAAa,GACb,KAAM,CAAE,KAAM,EAAM,YAAW,cACtB,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,OAAM,WAAU,WAAU,gBAChC,EAAsB,EAAK,SAAS,CAChC,EAAiB,EAAW,CAAE,GAAG,EAAM,WAAU,CAAG,EAE1D,GAAI,IAAa,OAAQ,CAEvB,IAAM,EAAa,MAAO,EAAa,KACrC,EAAe,GACf,EACD,CAEK,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAW,CAC9B,EAAW,OAAO,EAErB,CAAC,CAEI,EAAS,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAW,WACjB,OACA,WACA,aAAc,EACd,SAAU,EACX,CACD,EACA,EACD,CAEK,EAAiB,EAAsB,EAAO,SAAS,CAE7D,OAAO,EACL,EAAe,SACX,CAAE,GAAG,EAAQ,SAAU,EAAe,SAAU,CAChD,EACL,SACQ,IAAa,QAGtB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,0DACP,CAAC,CACH,CAGH,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,+BAA+B,IACtC,CAAC,CACH,EACD,CAEL,CAAC,EACF,CC1EJ,MAAM,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpD,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,UAAS,mBACX,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EACtB,EAAY,MAAO,EACzB,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,YACN,YAAa,6CACb,KAAM,EAAS,QACf,WAAY,MACZ,aAAc,EACd,cACA,eACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,iCACP,CAAC,CACH,CAGH,IAAM,EAAY,MAAO,EAAO,QAC9B,OAAO,OAAO,EAAO,CACpB,GACC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAO,MAAO,EAAa,KAAK,EAAM,GAAI,EAAS,CACzD,MAAO,CACL,GAAI,EAAM,GACV,OACA,SAAU,EAAM,SACjB,EACD,CACJ,CAAE,YAAa,YAAa,CAC7B,CAEK,EAAW,MAAO,EAAU,IAAI,EAAW,CAC/C,UACA,kBACD,CAAC,CAGI,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAS,CAC5B,EAAW,OAAO,EAErB,CAAC,CAsBF,OAAO,EAnBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAS,WACf,KAAM,kBACN,SAAU,EACV,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,kBACV,KAAM,kBACN,aAAc,EACd,SAAU,EACV,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,EACpC,CAEL,CAAC,EACF"}
+ {"version":3,"file":"index.mjs","names":["inputSchema","outputSchema","inputBytesArray: Uint8Array[]"],"sources":["../../src/nodes/conditional-node.ts","../../src/nodes/merge-node.ts","../../src/nodes/multiplex-node.ts","../../src/nodes/zip-node.ts"],"sourcesContent":["import {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { Effect } from \"effect\";\nimport type { ConditionalParams } from \"@/types/conditional-node\";\n\nexport function createConditionalNode(\n id: string,\n { field, operator, value }: ConditionalParams,\n) {\n return createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Conditional Router\",\n description: `Routes flow based on ${field} ${operator} ${value}`,\n type: NodeType.conditional,\n nodeTypeId: \"conditional\",\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n condition: { field, operator, value },\n run: ({ data }) => {\n // The actual routing logic is handled by the flow engine\n // This node just passes through the data\n return Effect.succeed(completeNodeExecution(data));\n },\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { MergeParams } from \"@/types/merge-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createMergeNode(\n id: string,\n { strategy, separator: _separator }: MergeParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Merge Files\",\n description: `Merges multiple files using ${strategy} strategy`,\n type: NodeType.merge,\n nodeTypeId: \"merge\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to merge node\",\n }),\n );\n }\n\n const inputFiles = Object.values(inputs);\n\n if (inputFiles.length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No files to merge\",\n }),\n );\n }\n\n switch (strategy) {\n case \"concat\": {\n // Read bytes from all input files\n const inputBytesArray: Uint8Array[] = [];\n let totalSize = 0;\n\n for (const file of inputFiles) {\n const bytes = yield* uploadEngine.read(file.id, clientId);\n inputBytesArray.push(bytes);\n totalSize += bytes.byteLength;\n }\n\n // Concatenate all files into one\n const mergedBytes = new Uint8Array(totalSize);\n let offset = 0;\n for (const bytes of inputBytesArray) {\n mergedBytes.set(bytes, offset);\n offset += bytes.byteLength;\n }\n\n // Create a stream from the merged bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(mergedBytes);\n controller.close();\n },\n });\n\n // Upload the merged file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: mergedBytes.byteLength,\n 
type: \"application/octet-stream\",\n fileName: `merged_${inputFiles.length}_files.bin`,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/octet-stream\",\n originalName: `merged_${inputFiles.length}_files`,\n extension: \"bin\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n }\n default: {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown merge strategy: ${strategy}`,\n }),\n );\n }\n }\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n resolveUploadMetadata,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport type { MultiplexParams } from \"@/types/multiplex-node\";\n\nexport function createMultiplexNode(\n id: string,\n { outputCount: _outputCount, strategy }: MultiplexParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Multiplex\",\n description: `Multiplexes input using ${strategy} strategy`,\n type: NodeType.multiplex,\n nodeTypeId: \"multiplex\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n multiOutput: true,\n run: ({ data: file, storageId, clientId }) => {\n return Effect.gen(function* () {\n const { type, fileName, metadata, metadataJson } =\n resolveUploadMetadata(file.metadata);\n const normalizedFile = metadata ? { ...file, metadata } : file;\n\n if (strategy === \"copy\") {\n // For copy strategy, read and re-upload the file\n const inputBytes = yield* uploadEngine.read(\n normalizedFile.id,\n clientId,\n );\n\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(inputBytes);\n controller.close();\n },\n });\n\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: inputBytes.byteLength,\n type,\n fileName,\n lastModified: 0,\n metadata: metadataJson,\n },\n clientId,\n stream,\n );\n\n const resolvedResult = resolveUploadMetadata(result.metadata);\n\n return completeNodeExecution(\n resolvedResult.metadata\n ? 
{ ...result, metadata: resolvedResult.metadata }\n : result,\n );\n } else if (strategy === \"split\") {\n // Split strategy is not supported in the new pattern\n // as it would require returning multiple UploadFiles\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"Split strategy is not supported with UploadFile pattern\",\n }),\n );\n }\n\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown multiplex strategy: ${strategy}`,\n }),\n );\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n ZipPlugin,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { ZipParams } from \"@/types/zip-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createZipNode(\n id: string,\n { zipName, includeMetadata }: ZipParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n const zipPlugin = yield* ZipPlugin;\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Zip Files\",\n description: \"Combines multiple files into a zip archive\",\n type: NodeType.process,\n nodeTypeId: \"zip\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to zip node\",\n }),\n );\n }\n\n const zipInputs = yield* Effect.forEach(\n Object.values(inputs),\n (input) =>\n Effect.gen(function* () {\n const data = yield* uploadEngine.read(input.id, clientId);\n return {\n id: input.id,\n data,\n metadata: input.metadata,\n };\n }),\n { concurrency: \"unbounded\" },\n );\n\n const zipBytes = yield* zipPlugin.zip(zipInputs, {\n zipName,\n includeMetadata,\n });\n\n // Create a stream from the zip bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(zipBytes);\n controller.close();\n },\n });\n\n // Upload the zip file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: zipBytes.byteLength,\n type: \"application/zip\",\n fileName: zipName,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/zip\",\n type: \"application/zip\",\n originalName: zipName,\n fileName: zipName,\n extension: \"zip\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n });\n },\n });\n 
});\n}\n"],"mappings":"yYASA,SAAgB,EACd,EACA,CAAE,QAAO,WAAU,SACnB,CACA,OAAO,EAAuC,CAC5C,KACA,KAAM,qBACN,YAAa,wBAAwB,EAAM,GAAG,EAAS,GAAG,IAC1D,KAAM,EAAS,YACf,WAAY,cACZ,YAAa,EACb,aAAc,EACd,UAAW,CAAE,QAAO,WAAU,QAAO,CACrC,KAAM,CAAE,UAGC,EAAO,QAAQ,EAAsB,EAAK,CAAC,CAErD,CAAC,CCdJ,MAAMA,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpDC,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,WAAU,UAAW,GACvB,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,cACN,YAAa,+BAA+B,EAAS,WACrD,KAAM,EAAS,MACf,WAAY,QACZ,aAAc,EACd,YAAA,EACA,aAAA,EACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,mCACP,CAAC,CACH,CAGH,IAAM,EAAa,OAAO,OAAO,EAAO,CAExC,GAAI,EAAW,SAAW,EACxB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,oBACP,CAAC,CACH,CAGH,OAAQ,EAAR,CACE,IAAK,SAAU,CAEb,IAAMC,EAAgC,EAAE,CACpC,EAAY,EAEhB,IAAK,IAAM,KAAQ,EAAY,CAC7B,IAAM,EAAQ,MAAO,EAAa,KAAK,EAAK,GAAI,EAAS,CACzD,EAAgB,KAAK,EAAM,CAC3B,GAAa,EAAM,WAIrB,IAAM,EAAc,IAAI,WAAW,EAAU,CACzC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAY,IAAI,EAAO,EAAO,CAC9B,GAAU,EAAM,WAIlB,IAAM,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAoBF,OAAO,EAjBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAY,WAClB,KAAM,2BACN,SAAU,UAAU,EAAW,OAAO,YACtC,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,2BACV,aAAc,UAAU,EAAW,OAAO,QAC1C,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,CAEtC,QACE,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,2BAA2B,IAClC,CAAC,CACH,GAGL,CAEL,CAAC,EACF,CCnGJ,SAAgB,EACd,EACA,CAAE,YAAa,EAAc,YAC7B,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuC,CACnD,KACA,KAAM,YACN,YAAa,2BAA2B,EAAS,WACjD,KAAM,EAAS,UACf,WAAY,YACZ,aAAc,EACd,YAAa,EACb,aAAc,EACd,YAAa,GACb,KAAM,CAAE,KAAM,EAAM,YAAW,cACtB,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,OAAM,WAAU,WAAU,gBAChC,EAAsB,EAAK,SAAS,CAChC,EAAiB,EAAW,CAAE,GAAG,EAAM,WAAU,CAAG,EAE1D,GAAI,IAAa,OAAQ,CAEvB,IAAM,EAAa,MAAO,EAAa,KACrC,EAAe,GACf,EACD,CAEK,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAW,CAC9B,EAAW,OAAO,EAErB,CAAC,CAEI,EAAS,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAW,WACjB,OACA,WACA,aAAc,EACd,SAAU,EACX,CACD,EACA,EACD,CAEK,EAAiB,EAAsB,EAAO,SAAS,CAE7D,OAAO,EACL,EAAe,SACX,CAAE,GAAG,EAAQ,SAAU,EAAe,SAAU,CAChD,EACL,SACQ,IAAa,QAGtB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,0DACP,CAAC,CACH,CAGH,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,+BAA+B,IACtC,CAAC,CACH,EACD,CAEL,CAAC,EACF,CC1EJ,MAAM,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpD,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,UAAS,mBACX,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EACtB,EAAY,MAAO,EACzB,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,YACN,YAAa,6CACb,KAAM,EAAS,QACf,WAAY,MACZ,aAAc,EACd,cACA,eACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,iCACP,CAAC,CACH,CAGH,IAAM,EAAY,MAAO,EAAO,QAC9B,OAAO,OAAO,EAAO,CACpB,GACC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAO,MAAO,EAAa,KAAK,EAAM,GAAI,EAAS,CACzD,MAAO,CACL,GAAI,EAAM,GACV,OACA,SAAU,EAAM,SACjB,EACD,CACJ,CAAE,YAAa,YAAa,CAC7B,CAEK,EAAW,MAAO,EAAU,IAAI,EAAW,CAC/C,UACA,kBACD,CAAC,CAGI,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAS,CAC5B,EAAW,OAAO,EAErB,CAAC,CAsBF,OAAO,EAnBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAS,WACf,KAAM,kBACN,SAAU,EACV,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,kBACV,KAAM,kBACN,aAAc,EACd,SAAU,EACV,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,EACpC,CAEL,CAAC,EACF"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@uploadista/flow-utility-nodes",
    "type": "module",
-   "version": "0.0.20-beta.6",
+   "version": "0.0.20-beta.8",
    "description": "Utility nodes for Uploadista Flow",
    "license": "MIT",
    "author": "Uploadista",
@@ -20,7 +20,7 @@
    }
  },
  "dependencies": {
-   "@uploadista/core": "0.0.20-beta.6"
+   "@uploadista/core": "0.0.20-beta.8"
  },
  "peerDependencies": {
    "effect": "^3.0.0",
@@ -28,15 +28,15 @@
  },
  "devDependencies": {
    "@effect/vitest": "0.27.0",
-   "@types/node": "24.10.2",
-   "effect": "3.19.10",
-   "tsdown": "0.17.2",
+   "@types/node": "24.10.4",
+   "effect": "3.19.12",
+   "tsdown": "0.18.0",
    "vitest": "4.0.15",
-   "zod": "4.1.13",
-   "@uploadista/typescript-config": "0.0.20-beta.6"
+   "zod": "4.2.0",
+   "@uploadista/typescript-config": "0.0.20-beta.8"
  },
  "scripts": {
-   "build": "tsdown",
+   "build": "tsc --noEmit && tsdown",
    "check": "biome check --write ./src",
    "format": "biome format --write ./src",
    "lint": "biome lint --write ./src",
package/src/nodes/merge-node.ts CHANGED
@@ -6,7 +6,7 @@ import {
    STORAGE_OUTPUT_TYPE_ID,
  } from "@uploadista/core/flow";
  import { type UploadFile, uploadFileSchema } from "@uploadista/core/types";
- import { UploadServer } from "@uploadista/core/upload";
+ import { UploadEngine } from "@uploadista/core/upload";
  import { Effect } from "effect";
  import { z } from "zod";
  import type { MergeParams } from "@/types/merge-node";
@@ -19,7 +19,7 @@ export function createMergeNode(
    { strategy, separator: _separator }: MergeParams,
  ) {
    return Effect.gen(function* () {
-     const uploadServer = yield* UploadServer;
+     const uploadEngine = yield* UploadEngine;
 
      return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({
        id,
@@ -58,7 +58,7 @@ export function createMergeNode(
              let totalSize = 0;
 
              for (const file of inputFiles) {
-               const bytes = yield* uploadServer.read(file.id, clientId);
+               const bytes = yield* uploadEngine.read(file.id, clientId);
                inputBytesArray.push(bytes);
                totalSize += bytes.byteLength;
              }
@@ -80,7 +80,7 @@ export function createMergeNode(
              });
 
              // Upload the merged file
-             const result = yield* uploadServer.upload(
+             const result = yield* uploadEngine.upload(
                {
                  storageId,
                  size: mergedBytes.byteLength,
package/src/nodes/multiplex-node.ts CHANGED
@@ -7,7 +7,7 @@ import {
    STORAGE_OUTPUT_TYPE_ID,
  } from "@uploadista/core/flow";
  import { type UploadFile, uploadFileSchema } from "@uploadista/core/types";
- import { UploadServer } from "@uploadista/core/upload";
+ import { UploadEngine } from "@uploadista/core/upload";
  import { Effect } from "effect";
  import type { MultiplexParams } from "@/types/multiplex-node";
 
@@ -16,7 +16,7 @@ export function createMultiplexNode(
    { outputCount: _outputCount, strategy }: MultiplexParams,
  ) {
    return Effect.gen(function* () {
-     const uploadServer = yield* UploadServer;
+     const uploadEngine = yield* UploadEngine;
 
      return yield* createFlowNode<UploadFile, UploadFile>({
        id,
@@ -36,7 +36,7 @@ export function createMultiplexNode(
 
          if (strategy === "copy") {
            // For copy strategy, read and re-upload the file
-           const inputBytes = yield* uploadServer.read(
+           const inputBytes = yield* uploadEngine.read(
              normalizedFile.id,
              clientId,
            );
@@ -48,7 +48,7 @@ export function createMultiplexNode(
            },
          });
 
-           const result = yield* uploadServer.upload(
+           const result = yield* uploadEngine.upload(
              {
                storageId,
                size: inputBytes.byteLength,
package/src/nodes/zip-node.ts CHANGED
@@ -7,7 +7,7 @@ import {
    ZipPlugin,
  } from "@uploadista/core/flow";
  import { type UploadFile, uploadFileSchema } from "@uploadista/core/types";
- import { UploadServer } from "@uploadista/core/upload";
+ import { UploadEngine } from "@uploadista/core/upload";
  import { Effect } from "effect";
  import { z } from "zod";
  import type { ZipParams } from "@/types/zip-node";
@@ -20,7 +20,7 @@ export function createZipNode(
    { zipName, includeMetadata }: ZipParams,
  ) {
    return Effect.gen(function* () {
-     const uploadServer = yield* UploadServer;
+     const uploadEngine = yield* UploadEngine;
      const zipPlugin = yield* ZipPlugin;
      return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({
        id,
@@ -46,7 +46,7 @@ export function createZipNode(
            Object.values(inputs),
            (input) =>
              Effect.gen(function* () {
-               const data = yield* uploadServer.read(input.id, clientId);
+               const data = yield* uploadEngine.read(input.id, clientId);
                return {
                  id: input.id,
                  data,
@@ -70,7 +70,7 @@ export function createZipNode(
            });
 
            // Upload the zip file
-           const result = yield* uploadServer.upload(
+           const result = yield* uploadEngine.upload(
              {
                storageId,
                size: zipBytes.byteLength,
@@ -1,9 +1,6 @@
  import { describe, expect, it } from "@effect/vitest";
  import { UploadistaError } from "@uploadista/core/errors";
- import {
-   TestUploadServer,
-   TestZipPlugin,
- } from "@uploadista/core/testing";
+ import { TestUploadEngine, TestZipPlugin } from "@uploadista/core/testing";
  import type { UploadFile } from "@uploadista/core/types";
  import { Effect, Layer } from "effect";
  import {
@@ -54,7 +51,7 @@ const createTestImageFile = (
  /**
   * Test layer combining all mocks
   */
- const TestLayer = Layer.mergeAll(TestUploadServer, TestZipPlugin);
+ const TestLayer = Layer.mergeAll(TestUploadEngine, TestZipPlugin);
 
  describe("Utility Nodes", () => {
    describe("ConditionalNode", () => {