@uploadista/flow-utility-nodes 0.0.20-beta.9 → 0.1.0-beta.5
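In short: the generated type declarations rename the namespace import alias _uploadista_core_flow2 to _uploadista_core_flow0, the sourcemap drops the now-unreferenced "inputBytesArray: Uint8Array[]" entry from its names table, @uploadista/core and @uploadista/typescript-config move to 0.1.0-beta.5 alongside devDependency bumps, and the test script changes from "vitest" to "vitest run".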

@@ -1,5 +1,5 @@
  import { a as MergeParams, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-7DDOD4H0.mjs";
- import * as _uploadista_core_flow2 from "@uploadista/core/flow";
+ import * as _uploadista_core_flow0 from "@uploadista/core/flow";
  import { NodeType, ZipPlugin } from "@uploadista/core/flow";
  import { UploadFile } from "@uploadista/core/types";
  import { Effect } from "effect";
@@ -15,7 +15,7 @@ declare function createConditionalNode(id: string, {
  field,
  operator,
  value
- }: ConditionalParams): Effect.Effect<_uploadista_core_flow2.FlowNodeData & {
+ }: ConditionalParams): Effect.Effect<_uploadista_core_flow0.FlowNodeData & {
  inputSchema: zod0.ZodType<UploadFile, unknown, zod_v4_core0.$ZodTypeInternals<UploadFile, unknown>>;
  outputSchema: zod0.ZodType<UploadFile, unknown, zod_v4_core0.$ZodTypeInternals<UploadFile, unknown>>;
  run: (args: {
@@ -25,7 +25,7 @@ declare function createConditionalNode(id: string, {
  flowId: string;
  inputs?: Record<string, unknown>;
  clientId: string | null;
- }) => Effect.Effect<_uploadista_core_flow2.NodeExecutionResult<UploadFile>, _uploadista_core_errors0.UploadistaError, never>;
+ }) => Effect.Effect<_uploadista_core_flow0.NodeExecutionResult<UploadFile>, _uploadista_core_errors0.UploadistaError, never>;
  condition?: {
  field: string;
  operator: string;
@@ -39,7 +39,7 @@ declare function createConditionalNode(id: string, {
  retryDelay?: number;
  exponentialBackoff?: boolean;
  };
- circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
+ circuitBreaker?: _uploadista_core_flow0.FlowCircuitBreakerConfig;
  } & {
  type: NodeType;
  }, _uploadista_core_errors0.UploadistaError, never>;
@@ -48,7 +48,7 @@ declare function createConditionalNode(id: string, {
  declare function createMergeNode(id: string, {
  strategy,
  separator: _separator
- }: MergeParams): Effect.Effect<_uploadista_core_flow2.FlowNodeData & {
+ }: MergeParams): Effect.Effect<_uploadista_core_flow0.FlowNodeData & {
  inputSchema: z.ZodType<Record<string, UploadFile>, unknown, z.core.$ZodTypeInternals<Record<string, UploadFile>, unknown>>;
  outputSchema: z.ZodType<UploadFile, unknown, z.core.$ZodTypeInternals<UploadFile, unknown>>;
  run: (args: {
@@ -58,7 +58,7 @@ declare function createMergeNode(id: string, {
  flowId: string;
  inputs?: Record<string, unknown>;
  clientId: string | null;
- }) => Effect.Effect<_uploadista_core_flow2.NodeExecutionResult<UploadFile>, UploadistaError, never>;
+ }) => Effect.Effect<_uploadista_core_flow0.NodeExecutionResult<UploadFile>, UploadistaError, never>;
  condition?: {
  field: string;
  operator: string;
@@ -72,7 +72,7 @@ declare function createMergeNode(id: string, {
  retryDelay?: number;
  exponentialBackoff?: boolean;
  };
- circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
+ circuitBreaker?: _uploadista_core_flow0.FlowCircuitBreakerConfig;
  } & {
  type: NodeType;
  }, UploadistaError, UploadEngine>;
@@ -81,7 +81,7 @@ declare function createMergeNode(id: string, {
  declare function createMultiplexNode(id: string, {
  outputCount: _outputCount,
  strategy
- }: MultiplexParams): Effect.Effect<_uploadista_core_flow2.FlowNodeData & {
+ }: MultiplexParams): Effect.Effect<_uploadista_core_flow0.FlowNodeData & {
  inputSchema: zod0.ZodType<UploadFile, unknown, zod_v4_core0.$ZodTypeInternals<UploadFile, unknown>>;
  outputSchema: zod0.ZodType<UploadFile, unknown, zod_v4_core0.$ZodTypeInternals<UploadFile, unknown>>;
  run: (args: {
@@ -91,7 +91,7 @@ declare function createMultiplexNode(id: string, {
  flowId: string;
  inputs?: Record<string, unknown>;
  clientId: string | null;
- }) => Effect.Effect<_uploadista_core_flow2.NodeExecutionResult<UploadFile>, UploadistaError, never>;
+ }) => Effect.Effect<_uploadista_core_flow0.NodeExecutionResult<UploadFile>, UploadistaError, never>;
  condition?: {
  field: string;
  operator: string;
@@ -105,7 +105,7 @@ declare function createMultiplexNode(id: string, {
  retryDelay?: number;
  exponentialBackoff?: boolean;
  };
- circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
+ circuitBreaker?: _uploadista_core_flow0.FlowCircuitBreakerConfig;
  } & {
  type: NodeType;
  }, UploadistaError, UploadEngine>;
@@ -114,7 +114,7 @@ declare function createMultiplexNode(id: string, {
  declare function createZipNode(id: string, {
  zipName,
  includeMetadata
- }: ZipParams): Effect.Effect<_uploadista_core_flow2.FlowNodeData & {
+ }: ZipParams): Effect.Effect<_uploadista_core_flow0.FlowNodeData & {
  inputSchema: z.ZodType<Record<string, UploadFile>, unknown, z.core.$ZodTypeInternals<Record<string, UploadFile>, unknown>>;
  outputSchema: z.ZodType<UploadFile, unknown, z.core.$ZodTypeInternals<UploadFile, unknown>>;
  run: (args: {
@@ -124,7 +124,7 @@ declare function createZipNode(id: string, {
  flowId: string;
  inputs?: Record<string, unknown>;
  clientId: string | null;
- }) => Effect.Effect<_uploadista_core_flow2.NodeExecutionResult<UploadFile>, UploadistaError, never>;
+ }) => Effect.Effect<_uploadista_core_flow0.NodeExecutionResult<UploadFile>, UploadistaError, never>;
  condition?: {
  field: string;
  operator: string;
@@ -138,7 +138,7 @@ declare function createZipNode(id: string, {
  retryDelay?: number;
  exponentialBackoff?: boolean;
  };
- circuitBreaker?: _uploadista_core_flow2.FlowCircuitBreakerConfig;
+ circuitBreaker?: _uploadista_core_flow0.FlowCircuitBreakerConfig;
  } & {
  type: NodeType;
  }, UploadistaError, UploadEngine | ZipPlugin>;
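The declarations above describe four Effect-returning node factories. As a rough orientation, here is a minimal sketch of how two of them might be instantiated, inferred purely from the signatures in this diff. The ids, the field/operator/value triple, and the zip name are illustrative assumptions, and actually running the zip node's Effect would additionally require providing the UploadEngine and ZipPlugin services listed in its environment:

import { Effect } from "effect";
import {
  createConditionalNode,
  createZipNode,
} from "@uploadista/flow-utility-nodes";

// Hypothetical wiring; parameter values are made up for illustration.
const nodes = Effect.gen(function* () {
  const router = yield* createConditionalNode("route-by-type", {
    field: "type",
    operator: "equals",
    value: "image/png",
  });
  const zip = yield* createZipNode("bundle", {
    zipName: "bundle.zip",
    includeMetadata: true,
  });
  return { router, zip };
});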
@@ -1 +1 @@
- {"version":3,"file":"index.mjs","names":["inputSchema","outputSchema","inputBytesArray: Uint8Array[]"],"sources":["../../src/nodes/conditional-node.ts","../../src/nodes/merge-node.ts","../../src/nodes/multiplex-node.ts","../../src/nodes/zip-node.ts"],"sourcesContent":["import {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { Effect } from \"effect\";\nimport type { ConditionalParams } from \"@/types/conditional-node\";\n\nexport function createConditionalNode(\n id: string,\n { field, operator, value }: ConditionalParams,\n) {\n return createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Conditional Router\",\n description: `Routes flow based on ${field} ${operator} ${value}`,\n type: NodeType.conditional,\n nodeTypeId: \"conditional\",\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n condition: { field, operator, value },\n run: ({ data }) => {\n // The actual routing logic is handled by the flow engine\n // This node just passes through the data\n return Effect.succeed(completeNodeExecution(data));\n },\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { MergeParams } from \"@/types/merge-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createMergeNode(\n id: string,\n { strategy, separator: _separator }: MergeParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Merge Files\",\n description: `Merges multiple files using ${strategy} strategy`,\n type: NodeType.merge,\n nodeTypeId: \"merge\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to merge node\",\n }),\n );\n }\n\n const inputFiles = Object.values(inputs);\n\n if (inputFiles.length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No files to merge\",\n }),\n );\n }\n\n switch (strategy) {\n case \"concat\": {\n // Read bytes from all input files\n const inputBytesArray: Uint8Array[] = [];\n let totalSize = 0;\n\n for (const file of inputFiles) {\n const bytes = yield* uploadEngine.read(file.id, clientId);\n inputBytesArray.push(bytes);\n totalSize += bytes.byteLength;\n }\n\n // Concatenate all files into one\n const mergedBytes = new Uint8Array(totalSize);\n let offset = 0;\n for (const bytes of inputBytesArray) {\n mergedBytes.set(bytes, offset);\n offset += bytes.byteLength;\n }\n\n // Create a stream from the merged bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(mergedBytes);\n controller.close();\n },\n });\n\n // Upload the merged file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: mergedBytes.byteLength,\n 
type: \"application/octet-stream\",\n fileName: `merged_${inputFiles.length}_files.bin`,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/octet-stream\",\n originalName: `merged_${inputFiles.length}_files`,\n extension: \"bin\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n }\n default: {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown merge strategy: ${strategy}`,\n }),\n );\n }\n }\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n resolveUploadMetadata,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport type { MultiplexParams } from \"@/types/multiplex-node\";\n\nexport function createMultiplexNode(\n id: string,\n { outputCount: _outputCount, strategy }: MultiplexParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Multiplex\",\n description: `Multiplexes input using ${strategy} strategy`,\n type: NodeType.multiplex,\n nodeTypeId: \"multiplex\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n multiOutput: true,\n run: ({ data: file, storageId, clientId }) => {\n return Effect.gen(function* () {\n const { type, fileName, metadata, metadataJson } =\n resolveUploadMetadata(file.metadata);\n const normalizedFile = metadata ? { ...file, metadata } : file;\n\n if (strategy === \"copy\") {\n // For copy strategy, read and re-upload the file\n const inputBytes = yield* uploadEngine.read(\n normalizedFile.id,\n clientId,\n );\n\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(inputBytes);\n controller.close();\n },\n });\n\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: inputBytes.byteLength,\n type,\n fileName,\n lastModified: 0,\n metadata: metadataJson,\n },\n clientId,\n stream,\n );\n\n const resolvedResult = resolveUploadMetadata(result.metadata);\n\n return completeNodeExecution(\n resolvedResult.metadata\n ? 
{ ...result, metadata: resolvedResult.metadata }\n : result,\n );\n } else if (strategy === \"split\") {\n // Split strategy is not supported in the new pattern\n // as it would require returning multiple UploadFiles\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"Split strategy is not supported with UploadFile pattern\",\n }),\n );\n }\n\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown multiplex strategy: ${strategy}`,\n }),\n );\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n ZipPlugin,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { ZipParams } from \"@/types/zip-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createZipNode(\n id: string,\n { zipName, includeMetadata }: ZipParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n const zipPlugin = yield* ZipPlugin;\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Zip Files\",\n description: \"Combines multiple files into a zip archive\",\n type: NodeType.process,\n nodeTypeId: \"zip\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to zip node\",\n }),\n );\n }\n\n const zipInputs = yield* Effect.forEach(\n Object.values(inputs),\n (input) =>\n Effect.gen(function* () {\n const data = yield* uploadEngine.read(input.id, clientId);\n return {\n id: input.id,\n data,\n metadata: input.metadata,\n };\n }),\n { concurrency: \"unbounded\" },\n );\n\n const zipBytes = yield* zipPlugin.zip(zipInputs, {\n zipName,\n includeMetadata,\n });\n\n // Create a stream from the zip bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(zipBytes);\n controller.close();\n },\n });\n\n // Upload the zip file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: zipBytes.byteLength,\n type: \"application/zip\",\n fileName: zipName,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/zip\",\n type: \"application/zip\",\n originalName: zipName,\n fileName: zipName,\n extension: \"zip\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n });\n },\n });\n 
});\n}\n"],"mappings":"yYASA,SAAgB,EACd,EACA,CAAE,QAAO,WAAU,SACnB,CACA,OAAO,EAAuC,CAC5C,KACA,KAAM,qBACN,YAAa,wBAAwB,EAAM,GAAG,EAAS,GAAG,IAC1D,KAAM,EAAS,YACf,WAAY,cACZ,YAAa,EACb,aAAc,EACd,UAAW,CAAE,QAAO,WAAU,QAAO,CACrC,KAAM,CAAE,UAGC,EAAO,QAAQ,EAAsB,EAAK,CAAC,CAErD,CAAC,CCdJ,MAAMA,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpDC,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,WAAU,UAAW,GACvB,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,cACN,YAAa,+BAA+B,EAAS,WACrD,KAAM,EAAS,MACf,WAAY,QACZ,aAAc,EACd,YAAA,EACA,aAAA,EACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,mCACP,CAAC,CACH,CAGH,IAAM,EAAa,OAAO,OAAO,EAAO,CAExC,GAAI,EAAW,SAAW,EACxB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,oBACP,CAAC,CACH,CAGH,OAAQ,EAAR,CACE,IAAK,SAAU,CAEb,IAAMC,EAAgC,EAAE,CACpC,EAAY,EAEhB,IAAK,IAAM,KAAQ,EAAY,CAC7B,IAAM,EAAQ,MAAO,EAAa,KAAK,EAAK,GAAI,EAAS,CACzD,EAAgB,KAAK,EAAM,CAC3B,GAAa,EAAM,WAIrB,IAAM,EAAc,IAAI,WAAW,EAAU,CACzC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAY,IAAI,EAAO,EAAO,CAC9B,GAAU,EAAM,WAIlB,IAAM,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAoBF,OAAO,EAjBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAY,WAClB,KAAM,2BACN,SAAU,UAAU,EAAW,OAAO,YACtC,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,2BACV,aAAc,UAAU,EAAW,OAAO,QAC1C,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,CAEtC,QACE,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,2BAA2B,IAClC,CAAC,CACH,GAGL,CAEL,CAAC,EACF,CCnGJ,SAAgB,EACd,EACA,CAAE,YAAa,EAAc,YAC7B,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuC,CACnD,KACA,KAAM,YACN,YAAa,2BAA2B,EAAS,WACjD,KAAM,EAAS,UACf,WAAY,YACZ,aAAc,EACd,YAAa,EACb,aAAc,EACd,YAAa,GACb,KAAM,CAAE,KAAM,EAAM,YAAW,cACtB,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,OAAM,WAAU,WAAU,gBAChC,EAAsB,EAAK,SAAS,CAChC,EAAiB,EAAW,CAAE,GAAG,EAAM,WAAU,CAAG,EAE1D,GAAI,IAAa,OAAQ,CAEvB,IAAM,EAAa,MAAO,EAAa,KACrC,EAAe,GACf,EACD,CAEK,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAW,CAC9B,EAAW,OAAO,EAErB,CAAC,CAEI,EAAS,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAW,WACjB,OACA,WACA,aAAc,EACd,SAAU,EACX,CACD,EACA,EACD,CAEK,EAAiB,EAAsB,EAAO,SAAS,CAE7D,OAAO,EACL,EAAe,SACX,CAAE,GAAG,EAAQ,SAAU,EAAe,SAAU,CAChD,EACL,SACQ,IAAa,QAGtB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,0DACP,CAAC,CACH,CAGH,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,+BAA+B,IACtC,CAAC,CACH,EACD,CAEL,CAAC,EACF,CC1EJ,MAAM,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpD,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,UAAS,mBACX,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EACtB,EAAY,MAAO,EACzB,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,YACN,YAAa,6CACb,KAAM,EAAS,QACf,WAAY,MACZ,aAAc,EACd,cACA,eACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,iCACP,CAAC,CACH,CAGH,IAAM,EAAY,MAAO,EAAO,QAC9B,OAAO,OAAO,EAAO,CACpB,GACC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAO,MAAO,EAAa,KAAK,EAAM,GAAI,EAAS,CACzD,MAAO,CACL,GAAI,EAAM,GACV,OACA,SAAU,EAAM,SACjB,EACD,CACJ,CAAE,YAAa,YAAa,CAC7B,CAEK,EAAW,MAAO,EAAU,IAAI,EAAW,CAC/C,UACA,kBACD,CAAC,CAGI,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAS,CAC5B,EAAW,OAAO,EAErB,CAAC,CAsBF,OAAO,EAnBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAS,WACf,KAAM,kBACN,SAAU,EACV,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,kBACV,KAAM,kBACN,aAAc,EACd,SAAU,EACV,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,EACpC,CAEL,CAAC,EACF"}
+ {"version":3,"file":"index.mjs","names":["inputSchema","outputSchema"],"sources":["../../src/nodes/conditional-node.ts","../../src/nodes/merge-node.ts","../../src/nodes/multiplex-node.ts","../../src/nodes/zip-node.ts"],"sourcesContent":["import {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { Effect } from \"effect\";\nimport type { ConditionalParams } from \"@/types/conditional-node\";\n\nexport function createConditionalNode(\n id: string,\n { field, operator, value }: ConditionalParams,\n) {\n return createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Conditional Router\",\n description: `Routes flow based on ${field} ${operator} ${value}`,\n type: NodeType.conditional,\n nodeTypeId: \"conditional\",\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n condition: { field, operator, value },\n run: ({ data }) => {\n // The actual routing logic is handled by the flow engine\n // This node just passes through the data\n return Effect.succeed(completeNodeExecution(data));\n },\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { MergeParams } from \"@/types/merge-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createMergeNode(\n id: string,\n { strategy, separator: _separator }: MergeParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Merge Files\",\n description: `Merges multiple files using ${strategy} strategy`,\n type: NodeType.merge,\n nodeTypeId: \"merge\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to merge node\",\n }),\n );\n }\n\n const inputFiles = Object.values(inputs);\n\n if (inputFiles.length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No files to merge\",\n }),\n );\n }\n\n switch (strategy) {\n case \"concat\": {\n // Read bytes from all input files\n const inputBytesArray: Uint8Array[] = [];\n let totalSize = 0;\n\n for (const file of inputFiles) {\n const bytes = yield* uploadEngine.read(file.id, clientId);\n inputBytesArray.push(bytes);\n totalSize += bytes.byteLength;\n }\n\n // Concatenate all files into one\n const mergedBytes = new Uint8Array(totalSize);\n let offset = 0;\n for (const bytes of inputBytesArray) {\n mergedBytes.set(bytes, offset);\n offset += bytes.byteLength;\n }\n\n // Create a stream from the merged bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(mergedBytes);\n controller.close();\n },\n });\n\n // Upload the merged file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: mergedBytes.byteLength,\n type: 
\"application/octet-stream\",\n fileName: `merged_${inputFiles.length}_files.bin`,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/octet-stream\",\n originalName: `merged_${inputFiles.length}_files`,\n extension: \"bin\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n }\n default: {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown merge strategy: ${strategy}`,\n }),\n );\n }\n }\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n resolveUploadMetadata,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport type { MultiplexParams } from \"@/types/multiplex-node\";\n\nexport function createMultiplexNode(\n id: string,\n { outputCount: _outputCount, strategy }: MultiplexParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Multiplex\",\n description: `Multiplexes input using ${strategy} strategy`,\n type: NodeType.multiplex,\n nodeTypeId: \"multiplex\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n multiOutput: true,\n run: ({ data: file, storageId, clientId }) => {\n return Effect.gen(function* () {\n const { type, fileName, metadata, metadataJson } =\n resolveUploadMetadata(file.metadata);\n const normalizedFile = metadata ? { ...file, metadata } : file;\n\n if (strategy === \"copy\") {\n // For copy strategy, read and re-upload the file\n const inputBytes = yield* uploadEngine.read(\n normalizedFile.id,\n clientId,\n );\n\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(inputBytes);\n controller.close();\n },\n });\n\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: inputBytes.byteLength,\n type,\n fileName,\n lastModified: 0,\n metadata: metadataJson,\n },\n clientId,\n stream,\n );\n\n const resolvedResult = resolveUploadMetadata(result.metadata);\n\n return completeNodeExecution(\n resolvedResult.metadata\n ? 
{ ...result, metadata: resolvedResult.metadata }\n : result,\n );\n } else if (strategy === \"split\") {\n // Split strategy is not supported in the new pattern\n // as it would require returning multiple UploadFiles\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"Split strategy is not supported with UploadFile pattern\",\n }),\n );\n }\n\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown multiplex strategy: ${strategy}`,\n }),\n );\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n ZipPlugin,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { ZipParams } from \"@/types/zip-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createZipNode(\n id: string,\n { zipName, includeMetadata }: ZipParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n const zipPlugin = yield* ZipPlugin;\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Zip Files\",\n description: \"Combines multiple files into a zip archive\",\n type: NodeType.process,\n nodeTypeId: \"zip\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to zip node\",\n }),\n );\n }\n\n const zipInputs = yield* Effect.forEach(\n Object.values(inputs),\n (input) =>\n Effect.gen(function* () {\n const data = yield* uploadEngine.read(input.id, clientId);\n return {\n id: input.id,\n data,\n metadata: input.metadata,\n };\n }),\n { concurrency: \"unbounded\" },\n );\n\n const zipBytes = yield* zipPlugin.zip(zipInputs, {\n zipName,\n includeMetadata,\n });\n\n // Create a stream from the zip bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(zipBytes);\n controller.close();\n },\n });\n\n // Upload the zip file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: zipBytes.byteLength,\n type: \"application/zip\",\n fileName: zipName,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/zip\",\n type: \"application/zip\",\n originalName: zipName,\n fileName: zipName,\n extension: \"zip\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n });\n },\n });\n 
});\n}\n"],"mappings":"yYASA,SAAgB,EACd,EACA,CAAE,QAAO,WAAU,SACnB,CACA,OAAO,EAAuC,CAC5C,KACA,KAAM,qBACN,YAAa,wBAAwB,EAAM,GAAG,EAAS,GAAG,IAC1D,KAAM,EAAS,YACf,WAAY,cACZ,YAAa,EACb,aAAc,EACd,UAAW,CAAE,QAAO,WAAU,QAAO,CACrC,KAAM,CAAE,UAGC,EAAO,QAAQ,EAAsB,EAAK,CAAC,CAErD,CAAC,CCdJ,MAAMA,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpDC,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,WAAU,UAAW,GACvB,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,cACN,YAAa,+BAA+B,EAAS,WACrD,KAAM,EAAS,MACf,WAAY,QACZ,aAAc,EACd,YAAA,EACA,aAAA,EACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,mCACP,CAAC,CACH,CAGH,IAAM,EAAa,OAAO,OAAO,EAAO,CAExC,GAAI,EAAW,SAAW,EACxB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,oBACP,CAAC,CACH,CAGH,OAAQ,EAAR,CACE,IAAK,SAAU,CAEb,IAAM,EAAgC,EAAE,CACpC,EAAY,EAEhB,IAAK,IAAM,KAAQ,EAAY,CAC7B,IAAM,EAAQ,MAAO,EAAa,KAAK,EAAK,GAAI,EAAS,CACzD,EAAgB,KAAK,EAAM,CAC3B,GAAa,EAAM,WAIrB,IAAM,EAAc,IAAI,WAAW,EAAU,CACzC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAY,IAAI,EAAO,EAAO,CAC9B,GAAU,EAAM,WAIlB,IAAM,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAoBF,OAAO,EAjBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAY,WAClB,KAAM,2BACN,SAAU,UAAU,EAAW,OAAO,YACtC,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,2BACV,aAAc,UAAU,EAAW,OAAO,QAC1C,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,CAEtC,QACE,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,2BAA2B,IAClC,CAAC,CACH,GAGL,CAEL,CAAC,EACF,CCnGJ,SAAgB,EACd,EACA,CAAE,YAAa,EAAc,YAC7B,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuC,CACnD,KACA,KAAM,YACN,YAAa,2BAA2B,EAAS,WACjD,KAAM,EAAS,UACf,WAAY,YACZ,aAAc,EACd,YAAa,EACb,aAAc,EACd,YAAa,GACb,KAAM,CAAE,KAAM,EAAM,YAAW,cACtB,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,OAAM,WAAU,WAAU,gBAChC,EAAsB,EAAK,SAAS,CAChC,EAAiB,EAAW,CAAE,GAAG,EAAM,WAAU,CAAG,EAE1D,GAAI,IAAa,OAAQ,CAEvB,IAAM,EAAa,MAAO,EAAa,KACrC,EAAe,GACf,EACD,CAEK,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAW,CAC9B,EAAW,OAAO,EAErB,CAAC,CAEI,EAAS,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAW,WACjB,OACA,WACA,aAAc,EACd,SAAU,EACX,CACD,EACA,EACD,CAEK,EAAiB,EAAsB,EAAO,SAAS,CAE7D,OAAO,EACL,EAAe,SACX,CAAE,GAAG,EAAQ,SAAU,EAAe,SAAU,CAChD,EACL,SACQ,IAAa,QAGtB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,0DACP,CAAC,CACH,CAGH,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,+BAA+B,IACtC,CAAC,CACH,EACD,CAEL,CAAC,EACF,CC1EJ,MAAM,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpD,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,UAAS,mBACX,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EACtB,EAAY,MAAO,EACzB,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,YACN,YAAa,6CACb,KAAM,EAAS,QACf,WAAY,MACZ,aAAc,EACd,cACA,eACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,iCACP,CAAC,CACH,CAGH,IAAM,EAAY,MAAO,EAAO,QAC9B,OAAO,OAAO,EAAO,CACpB,GACC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAO,MAAO,EAAa,KAAK,EAAM,GAAI,EAAS,CACzD,MAAO,CACL,GAAI,EAAM,GACV,OACA,SAAU,EAAM,SACjB,EACD,CACJ,CAAE,YAAa,YAAa,CAC7B,CAEK,EAAW,MAAO,EAAU,IAAI,EAAW,CAC/C,UACA,kBACD,CAAC,CAGI,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAS,CAC5B,EAAW,OAAO,EAErB,CAAC,CAsBF,OAAO,EAnBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAS,WACf,KAAM,kBACN,SAAU,EACV,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,kBACV,KAAM,kBACN,aAAc,EACd,SAAU,EACV,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,EACpC,CAEL,CAAC,EACF"}
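The embedded sourcesContent also shows the implementations. The merge node's "concat" strategy reads every input file's bytes, concatenates them into a single Uint8Array, wraps the result in a one-chunk ReadableStream, and re-uploads it. A standalone sketch of that byte-concatenation step, simplified and without the Effect plumbing:

// Concatenate byte chunks and expose them as a single-chunk stream,
// mirroring the merge node's "concat" strategy from sourcesContent.
function concatToStream(chunks: Uint8Array[]): ReadableStream<Uint8Array> {
  const totalSize = chunks.reduce((n, c) => n + c.byteLength, 0);
  const merged = new Uint8Array(totalSize);
  let offset = 0;
  for (const c of chunks) {
    merged.set(c, offset);
    offset += c.byteLength;
  }
  return new ReadableStream({
    start(controller) {
      controller.enqueue(merged);
      controller.close();
    },
  });
}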
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@uploadista/flow-utility-nodes",
  "type": "module",
- "version": "0.0.20-beta.9",
+ "version": "0.1.0-beta.5",
  "description": "Utility nodes for Uploadista Flow",
  "license": "MIT",
  "author": "Uploadista",
@@ -20,7 +20,7 @@
  }
  },
  "dependencies": {
- "@uploadista/core": "0.0.20-beta.9"
+ "@uploadista/core": "0.1.0-beta.5"
  },
  "peerDependencies": {
  "effect": "^3.0.0",
@@ -28,19 +28,19 @@
  },
  "devDependencies": {
  "@effect/vitest": "0.27.0",
- "@types/node": "24.10.4",
- "effect": "3.19.12",
- "tsdown": "0.18.0",
- "vitest": "4.0.15",
- "zod": "4.2.0",
- "@uploadista/typescript-config": "0.0.20-beta.9"
+ "@types/node": "24.10.8",
+ "effect": "3.19.14",
+ "tsdown": "0.19.0",
+ "vitest": "4.0.17",
+ "zod": "4.3.5",
+ "@uploadista/typescript-config": "0.1.0-beta.5"
  },
  "scripts": {
  "build": "tsc --noEmit && tsdown",
  "check": "biome check --write ./src",
  "format": "biome format --write ./src",
  "lint": "biome lint --write ./src",
- "test": "vitest",
+ "test": "vitest run",
  "test:run": "vitest run",
  "test:watch": "vitest --watch"
  }
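Note on the script change: vitest with no arguments starts watch mode in interactive terminals, while vitest run executes the suite once and exits, so the updated test script is CI-friendly and now duplicates test:run.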