@uploadista/flow-utility-nodes 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/.turbo/turbo-build.log +5 -0
  2. package/.turbo/turbo-check.log +5 -0
  3. package/LICENSE +21 -0
  4. package/README.md +289 -0
  5. package/dist/conditional-node.d.ts +25 -0
  6. package/dist/conditional-node.d.ts.map +1 -0
  7. package/dist/conditional-node.js +34 -0
  8. package/dist/index.d.ts +5 -0
  9. package/dist/index.d.ts.map +1 -0
  10. package/dist/index.js +4 -0
  11. package/dist/merge-node.d.ts +12 -0
  12. package/dist/merge-node.d.ts.map +1 -0
  13. package/dist/merge-node.js +77 -0
  14. package/dist/multiplex-node.d.ts +42 -0
  15. package/dist/multiplex-node.d.ts.map +1 -0
  16. package/dist/multiplex-node.js +67 -0
  17. package/dist/nodes/conditional-node.d.ts +5 -0
  18. package/dist/nodes/conditional-node.d.ts.map +1 -0
  19. package/dist/nodes/conditional-node.js +19 -0
  20. package/dist/nodes/index.d.ts +5 -0
  21. package/dist/nodes/index.d.ts.map +1 -0
  22. package/dist/nodes/index.js +4 -0
  23. package/dist/nodes/merge-node.d.ts +7 -0
  24. package/dist/nodes/merge-node.d.ts.map +1 -0
  25. package/dist/nodes/merge-node.js +82 -0
  26. package/dist/nodes/multiplex-node.d.ts +7 -0
  27. package/dist/nodes/multiplex-node.d.ts.map +1 -0
  28. package/dist/nodes/multiplex-node.js +57 -0
  29. package/dist/nodes/zip-node.d.ts +8 -0
  30. package/dist/nodes/zip-node.d.ts.map +1 -0
  31. package/dist/nodes/zip-node.js +64 -0
  32. package/dist/types/conditional-node.d.ts +21 -0
  33. package/dist/types/conditional-node.d.ts.map +1 -0
  34. package/dist/types/conditional-node.js +13 -0
  35. package/dist/types/index.d.ts +5 -0
  36. package/dist/types/index.d.ts.map +1 -0
  37. package/dist/types/index.js +4 -0
  38. package/dist/types/merge-node.d.ts +11 -0
  39. package/dist/types/merge-node.d.ts.map +1 -0
  40. package/dist/types/merge-node.js +6 -0
  41. package/dist/types/multiplex-node.d.ts +10 -0
  42. package/dist/types/multiplex-node.d.ts.map +1 -0
  43. package/dist/types/multiplex-node.js +5 -0
  44. package/dist/types/zip-node.d.ts +8 -0
  45. package/dist/types/zip-node.d.ts.map +1 -0
  46. package/dist/types/zip-node.js +6 -0
  47. package/dist/zip-node.d.ts +9 -0
  48. package/dist/zip-node.d.ts.map +1 -0
  49. package/dist/zip-node.js +65 -0
  50. package/package.json +35 -0
  51. package/src/nodes/conditional-node.ts +28 -0
  52. package/src/nodes/index.ts +4 -0
  53. package/src/nodes/merge-node.ts +111 -0
  54. package/src/nodes/multiplex-node.ts +87 -0
  55. package/src/nodes/zip-node.ts +92 -0
  56. package/src/types/conditional-node.ts +16 -0
  57. package/src/types/index.ts +4 -0
  58. package/src/types/merge-node.ts +9 -0
  59. package/src/types/multiplex-node.ts +8 -0
  60. package/src/types/zip-node.ts +9 -0
  61. package/tsconfig.json +14 -0
  62. package/tsconfig.tsbuildinfo +1 -0
package/dist/nodes/merge-node.js ADDED
@@ -0,0 +1,82 @@
+ import { UploadistaError } from "@uploadista/core/errors";
+ import { completeNodeExecution, createFlowNode, NodeType, } from "@uploadista/core/flow";
+ import { uploadFileSchema } from "@uploadista/core/types";
+ import { UploadServer } from "@uploadista/core/upload";
+ import { Effect } from "effect";
+ import { z } from "zod";
+ const inputSchema = z.record(z.string(), uploadFileSchema);
+ const outputSchema = uploadFileSchema;
+ export function createMergeNode(id, { strategy, separator: _separator }) {
+     return Effect.gen(function* () {
+         const uploadServer = yield* UploadServer;
+         return yield* createFlowNode({
+             id,
+             name: "Merge Files",
+             description: `Merges multiple files using ${strategy} strategy`,
+             type: NodeType.merge,
+             inputSchema,
+             outputSchema,
+             multiInput: true,
+             run: ({ data: inputs, storageId, clientId }) => {
+                 return Effect.gen(function* () {
+                     if (!inputs || Object.keys(inputs).length === 0) {
+                         return yield* Effect.fail(UploadistaError.fromCode("VALIDATION_ERROR", {
+                             body: "No inputs provided to merge node",
+                         }));
+                     }
+                     const inputFiles = Object.values(inputs);
+                     if (inputFiles.length === 0) {
+                         return yield* Effect.fail(UploadistaError.fromCode("VALIDATION_ERROR", {
+                             body: "No files to merge",
+                         }));
+                     }
+                     switch (strategy) {
+                         case "concat": {
+                             // Read bytes from all input files
+                             const inputBytesArray = [];
+                             let totalSize = 0;
+                             for (const file of inputFiles) {
+                                 const bytes = yield* uploadServer.read(file.id, clientId);
+                                 inputBytesArray.push(bytes);
+                                 totalSize += bytes.byteLength;
+                             }
+                             // Concatenate all files into one
+                             const mergedBytes = new Uint8Array(totalSize);
+                             let offset = 0;
+                             for (const bytes of inputBytesArray) {
+                                 mergedBytes.set(bytes, offset);
+                                 offset += bytes.byteLength;
+                             }
+                             // Create a stream from the merged bytes
+                             const stream = new ReadableStream({
+                                 start(controller) {
+                                     controller.enqueue(mergedBytes);
+                                     controller.close();
+                                 },
+                             });
+                             // Upload the merged file
+                             const result = yield* uploadServer.upload({
+                                 storageId,
+                                 size: mergedBytes.byteLength,
+                                 type: "application/octet-stream",
+                                 fileName: `merged_${inputFiles.length}_files.bin`,
+                                 lastModified: 0,
+                                 metadata: JSON.stringify({
+                                     mimeType: "application/octet-stream",
+                                     originalName: `merged_${inputFiles.length}_files`,
+                                     extension: "bin",
+                                 }),
+                             }, clientId, stream);
+                             return completeNodeExecution(result);
+                         }
+                         default: {
+                             return yield* Effect.fail(UploadistaError.fromCode("VALIDATION_ERROR", {
+                                 body: `Unknown merge strategy: ${strategy}`,
+                             }));
+                         }
+                     }
+                 });
+             },
+         });
+     });
+ }
package/dist/nodes/multiplex-node.d.ts ADDED
@@ -0,0 +1,7 @@
+ import { UploadistaError } from "@uploadista/core/errors";
+ import { type UploadFile } from "@uploadista/core/types";
+ import { UploadServer } from "@uploadista/core/upload";
+ import { Effect } from "effect";
+ import type { MultiplexParams } from "@/types/multiplex-node";
+ export declare function createMultiplexNode(id: string, { outputCount: _outputCount, strategy }: MultiplexParams): Effect.Effect<import("@uploadista/core").FlowNode<UploadFile, UploadFile, UploadistaError>, never, UploadServer>;
+ //# sourceMappingURL=multiplex-node.d.ts.map
package/dist/nodes/multiplex-node.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"multiplex-node.d.ts","sourceRoot":"","sources":["../../src/nodes/multiplex-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAO1D,OAAO,EAAE,KAAK,UAAU,EAAoB,MAAM,wBAAwB,CAAC;AAC3E,OAAO,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AACvD,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAChC,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AAE9D,wBAAgB,mBAAmB,CACjC,EAAE,EAAE,MAAM,EACV,EAAE,WAAW,EAAE,YAAY,EAAE,QAAQ,EAAE,EAAE,eAAe,oHAwEzD"}
package/dist/nodes/multiplex-node.js ADDED
@@ -0,0 +1,57 @@
+ import { UploadistaError } from "@uploadista/core/errors";
+ import { completeNodeExecution, createFlowNode, NodeType, resolveUploadMetadata, } from "@uploadista/core/flow";
+ import { uploadFileSchema } from "@uploadista/core/types";
+ import { UploadServer } from "@uploadista/core/upload";
+ import { Effect } from "effect";
+ export function createMultiplexNode(id, { outputCount: _outputCount, strategy }) {
+     return Effect.gen(function* () {
+         const uploadServer = yield* UploadServer;
+         return yield* createFlowNode({
+             id,
+             name: "Multiplex",
+             description: `Multiplexes input using ${strategy} strategy`,
+             type: NodeType.multiplex,
+             inputSchema: uploadFileSchema,
+             outputSchema: uploadFileSchema,
+             multiOutput: true,
+             run: ({ data: file, storageId, clientId }) => {
+                 return Effect.gen(function* () {
+                     const { type, fileName, metadata, metadataJson } = resolveUploadMetadata(file.metadata);
+                     const normalizedFile = metadata ? { ...file, metadata } : file;
+                     if (strategy === "copy") {
+                         // For copy strategy, read and re-upload the file
+                         const inputBytes = yield* uploadServer.read(normalizedFile.id, clientId);
+                         const stream = new ReadableStream({
+                             start(controller) {
+                                 controller.enqueue(inputBytes);
+                                 controller.close();
+                             },
+                         });
+                         const result = yield* uploadServer.upload({
+                             storageId,
+                             size: inputBytes.byteLength,
+                             type,
+                             fileName,
+                             lastModified: 0,
+                             metadata: metadataJson,
+                         }, clientId, stream);
+                         const resolvedResult = resolveUploadMetadata(result.metadata);
+                         return completeNodeExecution(resolvedResult.metadata
+                             ? { ...result, metadata: resolvedResult.metadata }
+                             : result);
+                     }
+                     else if (strategy === "split") {
+                         // Split strategy is not supported in the new pattern
+                         // as it would require returning multiple UploadFiles
+                         return yield* Effect.fail(UploadistaError.fromCode("VALIDATION_ERROR", {
+                             body: "Split strategy is not supported with UploadFile pattern",
+                         }));
+                     }
+                     return yield* Effect.fail(UploadistaError.fromCode("VALIDATION_ERROR", {
+                         body: `Unknown multiplex strategy: ${strategy}`,
+                     }));
+                 });
+             },
+         });
+     });
+ }
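
A minimal usage sketch for the compiled factory above (not part of the package; the node id is illustrative). Building the node is itself an Effect that requires UploadServer, whose layer ships with @uploadista/core and is not shown in this diff.

    import { Effect } from "effect";
    import { createMultiplexNode } from "@uploadista/flow-utility-nodes/nodes";

    // outputCount is accepted but unused by the "copy" strategy (it is
    // bound as _outputCount), and "split" fails with VALIDATION_ERROR.
    const multiplexNode = Effect.gen(function* () {
      return yield* createMultiplexNode("multiplex-1", {
        outputCount: 2,
        strategy: "copy",
      });
    });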
package/dist/nodes/zip-node.d.ts ADDED
@@ -0,0 +1,8 @@
+ import { UploadistaError } from "@uploadista/core/errors";
+ import { ZipPlugin } from "@uploadista/core/flow";
+ import { type UploadFile } from "@uploadista/core/types";
+ import { UploadServer } from "@uploadista/core/upload";
+ import { Effect } from "effect";
+ import type { ZipParams } from "@/types/zip-node";
+ export declare function createZipNode(id: string, { zipName, includeMetadata }: ZipParams): Effect.Effect<import("@uploadista/core").FlowNode<Record<string, UploadFile>, UploadFile, UploadistaError>, never, UploadServer | ZipPlugin>;
+ //# sourceMappingURL=zip-node.d.ts.map
package/dist/nodes/zip-node.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"zip-node.d.ts","sourceRoot":"","sources":["../../src/nodes/zip-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAIL,SAAS,EACV,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,KAAK,UAAU,EAAoB,MAAM,wBAAwB,CAAC;AAC3E,OAAO,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AACvD,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAKlD,wBAAgB,aAAa,CAC3B,EAAE,EAAE,MAAM,EACV,EAAE,OAAO,EAAE,eAAe,EAAE,EAAE,SAAS,gJAyExC"}
package/dist/nodes/zip-node.js ADDED
@@ -0,0 +1,64 @@
+ import { UploadistaError } from "@uploadista/core/errors";
+ import { completeNodeExecution, createFlowNode, NodeType, ZipPlugin, } from "@uploadista/core/flow";
+ import { uploadFileSchema } from "@uploadista/core/types";
+ import { UploadServer } from "@uploadista/core/upload";
+ import { Effect } from "effect";
+ import { z } from "zod";
+ const inputSchema = z.record(z.string(), uploadFileSchema);
+ const outputSchema = uploadFileSchema;
+ export function createZipNode(id, { zipName, includeMetadata }) {
+     return Effect.gen(function* () {
+         const uploadServer = yield* UploadServer;
+         const zipPlugin = yield* ZipPlugin;
+         return yield* createFlowNode({
+             id,
+             name: "Zip Files",
+             description: "Combines multiple files into a zip archive",
+             type: NodeType.process,
+             inputSchema,
+             outputSchema,
+             multiInput: true,
+             run: ({ data: inputs, storageId, clientId }) => {
+                 return Effect.gen(function* () {
+                     if (!inputs || Object.keys(inputs).length === 0) {
+                         return yield* Effect.fail(UploadistaError.fromCode("VALIDATION_ERROR", {
+                             body: "No inputs provided to zip node",
+                         }));
+                     }
+                     const zipInputs = yield* Effect.forEach(Object.values(inputs), (input) => Effect.gen(function* () {
+                         const data = yield* uploadServer.read(input.id, clientId);
+                         return {
+                             id: input.id,
+                             data,
+                             metadata: input.metadata,
+                         };
+                     }), { concurrency: "unbounded" });
+                     const zipBytes = yield* zipPlugin.zip(zipInputs, { zipName, includeMetadata });
+                     // Create a stream from the zip bytes
+                     const stream = new ReadableStream({
+                         start(controller) {
+                             controller.enqueue(zipBytes);
+                             controller.close();
+                         },
+                     });
+                     // Upload the zip file
+                     const result = yield* uploadServer.upload({
+                         storageId,
+                         size: zipBytes.byteLength,
+                         type: "application/zip",
+                         fileName: zipName,
+                         lastModified: 0,
+                         metadata: JSON.stringify({
+                             mimeType: "application/zip",
+                             type: "application/zip",
+                             originalName: zipName,
+                             fileName: zipName,
+                             extension: "zip",
+                         }),
+                     }, clientId, stream);
+                     return completeNodeExecution(result);
+                 });
+             },
+         });
+     });
+ }
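
A comparable sketch for the zip factory (illustrative, not part of the package). Note from the typings above that it requires both UploadServer and ZipPlugin, so a runtime must provide layers for both; neither implementation is part of this diff.

    import { Effect } from "effect";
    import { createZipNode } from "@uploadista/flow-utility-nodes/nodes";

    // zipName doubles as the upload fileName and the metadata originalName.
    const zipNode = Effect.gen(function* () {
      return yield* createZipNode("zip-1", {
        zipName: "bundle.zip",
        includeMetadata: true,
        inputCount: 2,
      });
    });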
package/dist/types/conditional-node.d.ts ADDED
@@ -0,0 +1,21 @@
+ import { z } from "zod";
+ export declare const conditionalParamsSchema: z.ZodObject<{
+     field: z.ZodEnum<{
+         mimeType: "mimeType";
+         size: "size";
+         width: "width";
+         height: "height";
+         extension: "extension";
+     }>;
+     operator: z.ZodEnum<{
+         equals: "equals";
+         notEquals: "notEquals";
+         greaterThan: "greaterThan";
+         lessThan: "lessThan";
+         contains: "contains";
+         startsWith: "startsWith";
+     }>;
+     value: z.ZodUnion<readonly [z.ZodString, z.ZodNumber]>;
+ }, z.core.$strip>;
+ export type ConditionalParams = z.infer<typeof conditionalParamsSchema>;
+ //# sourceMappingURL=conditional-node.d.ts.map
package/dist/types/conditional-node.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"conditional-node.d.ts","sourceRoot":"","sources":["../../src/types/conditional-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,eAAO,MAAM,uBAAuB;;;;;;;;;;;;;;;;;iBAWlC,CAAC;AAEH,MAAM,MAAM,iBAAiB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,uBAAuB,CAAC,CAAC"}
package/dist/types/conditional-node.js ADDED
@@ -0,0 +1,13 @@
+ import { z } from "zod";
+ export const conditionalParamsSchema = z.object({
+     field: z.enum(["mimeType", "size", "width", "height", "extension"]),
+     operator: z.enum([
+         "equals",
+         "notEquals",
+         "greaterThan",
+         "lessThan",
+         "contains",
+         "startsWith",
+     ]),
+     value: z.union([z.string(), z.number()]),
+ });
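
The schema above is small enough to exercise directly; a quick sketch (not part of the package) of how the exported parser behaves on valid and invalid params:

    import { conditionalParamsSchema } from "@uploadista/flow-utility-nodes/types";

    // Valid: route files larger than 1 MiB.
    const params = conditionalParamsSchema.parse({
      field: "size",
      operator: "greaterThan",
      value: 1_048_576,
    });

    // Invalid: "between" is not in the operator enum, so safeParse fails.
    const bad = conditionalParamsSchema.safeParse({
      field: "size",
      operator: "between",
      value: 10,
    });
    console.log(bad.success); // false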
package/dist/types/index.d.ts ADDED
@@ -0,0 +1,5 @@
+ export * from "./conditional-node";
+ export * from "./merge-node";
+ export * from "./multiplex-node";
+ export * from "./zip-node";
+ //# sourceMappingURL=index.d.ts.map
package/dist/types/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":"AAAA,cAAc,oBAAoB,CAAC;AACnC,cAAc,cAAc,CAAC;AAC7B,cAAc,kBAAkB,CAAC;AACjC,cAAc,YAAY,CAAC"}
package/dist/types/index.js ADDED
@@ -0,0 +1,4 @@
+ export * from "./conditional-node";
+ export * from "./merge-node";
+ export * from "./multiplex-node";
+ export * from "./zip-node";
package/dist/types/merge-node.d.ts ADDED
@@ -0,0 +1,11 @@
+ import { z } from "zod";
+ export declare const mergeParamsSchema: z.ZodObject<{
+     strategy: z.ZodDefault<z.ZodEnum<{
+         concat: "concat";
+         batch: "batch";
+     }>>;
+     separator: z.ZodOptional<z.ZodDefault<z.ZodString>>;
+     inputCount: z.ZodDefault<z.ZodNumber>;
+ }, z.core.$strip>;
+ export type MergeParams = z.infer<typeof mergeParamsSchema>;
+ //# sourceMappingURL=merge-node.d.ts.map
package/dist/types/merge-node.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"merge-node.d.ts","sourceRoot":"","sources":["../../src/types/merge-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,eAAO,MAAM,iBAAiB;;;;;;;iBAI5B,CAAC;AAEH,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,iBAAiB,CAAC,CAAC"}
package/dist/types/merge-node.js ADDED
@@ -0,0 +1,6 @@
+ import { z } from "zod";
+ export const mergeParamsSchema = z.object({
+     strategy: z.enum(["concat", "batch"]).default("batch"),
+     separator: z.string().default("\n").optional(),
+     inputCount: z.number().min(2).max(10).default(2),
+ });
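
One subtlety worth noting in the schema above: .optional() is applied after .default("\n"), so an omitted separator appears to stay undefined rather than receiving the default. A hedged sketch (not part of the package):

    import { mergeParamsSchema } from "@uploadista/flow-utility-nodes/types";

    // strategy and inputCount get their defaults; separator does not,
    // because the outer .optional() short-circuits an undefined input
    // before the inner .default("\n") can run.
    const params = mergeParamsSchema.parse({});
    console.log(params); // { strategy: "batch", inputCount: 2 }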
package/dist/types/multiplex-node.d.ts ADDED
@@ -0,0 +1,10 @@
+ import { z } from "zod";
+ export declare const multiplexParamsSchema: z.ZodObject<{
+     outputCount: z.ZodNumber;
+     strategy: z.ZodDefault<z.ZodEnum<{
+         copy: "copy";
+         split: "split";
+     }>>;
+ }, z.core.$strip>;
+ export type MultiplexParams = z.infer<typeof multiplexParamsSchema>;
+ //# sourceMappingURL=multiplex-node.d.ts.map
package/dist/types/multiplex-node.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"multiplex-node.d.ts","sourceRoot":"","sources":["../../src/types/multiplex-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,eAAO,MAAM,qBAAqB;;;;;;iBAGhC,CAAC;AAEH,MAAM,MAAM,eAAe,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,qBAAqB,CAAC,CAAC"}
package/dist/types/multiplex-node.js ADDED
@@ -0,0 +1,5 @@
+ import { z } from "zod";
+ export const multiplexParamsSchema = z.object({
+     outputCount: z.number().min(1).max(10),
+     strategy: z.enum(["copy", "split"]).default("copy"),
+ });
package/dist/types/zip-node.d.ts ADDED
@@ -0,0 +1,8 @@
+ import { z } from "zod";
+ export declare const zipParamsSchema: z.ZodObject<{
+     zipName: z.ZodDefault<z.ZodString>;
+     includeMetadata: z.ZodDefault<z.ZodBoolean>;
+     inputCount: z.ZodDefault<z.ZodNumber>;
+ }, z.core.$strip>;
+ export type ZipParams = z.infer<typeof zipParamsSchema>;
+ //# sourceMappingURL=zip-node.d.ts.map
package/dist/types/zip-node.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"zip-node.d.ts","sourceRoot":"","sources":["../../src/types/zip-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,eAAO,MAAM,eAAe;;;;iBAI1B,CAAC;AAEH,MAAM,MAAM,SAAS,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,eAAe,CAAC,CAAC"}
package/dist/types/zip-node.js ADDED
@@ -0,0 +1,6 @@
+ import { z } from "zod";
+ export const zipParamsSchema = z.object({
+     zipName: z.string().default("archive.zip"),
+     includeMetadata: z.boolean().default(false),
+     inputCount: z.number().min(2).max(10).default(2),
+ });
package/dist/zip-node.d.ts ADDED
@@ -0,0 +1,9 @@
+ import { type FlowFile, type FlowFileBatch } from "@uploadista/flow-core";
+ import { z } from "zod";
+ export declare const zipParamsSchema: z.ZodObject<{
+     zipName: z.ZodDefault<z.ZodString>;
+     includeMetadata: z.ZodDefault<z.ZodBoolean>;
+ }, z.core.$strip>;
+ export type ZipParams = z.infer<typeof zipParamsSchema>;
+ export declare function createZipNode(id: string, { zipName, includeMetadata }: ZipParams): import("@uploadista/flow-core").FlowNode<Record<string, FlowFile | FlowFileBatch>, FlowFile>;
+ //# sourceMappingURL=zip-node.d.ts.map
package/dist/zip-node.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"zip-node.d.ts","sourceRoot":"","sources":["../src/zip-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,KAAK,QAAQ,EACb,KAAK,aAAa,EAKnB,MAAM,uBAAuB,CAAC;AAE/B,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,eAAO,MAAM,eAAe;;;iBAG1B,CAAC;AAEH,MAAM,MAAM,SAAS,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,eAAe,CAAC,CAAC;AASxD,wBAAgB,aAAa,CAC3B,EAAE,EAAE,MAAM,EACV,EAAE,OAAO,EAAE,eAAe,EAAE,EAAE,SAAS,gGAmExC"}
package/dist/zip-node.js ADDED
@@ -0,0 +1,65 @@
+ import { createFlowNode, flowFileBatchSchema, flowFileSchema, isFlowFile, NodeType, } from "@uploadista/flow-core";
+ import JSZip from "jszip";
+ import { z } from "zod";
+ export const zipParamsSchema = z.object({
+     zipName: z.string().default("archive.zip"),
+     includeMetadata: z.boolean().default(false),
+ });
+ // Define schemas for input and output
+ const inputSchema = z.record(z.string(), z.union([flowFileSchema, flowFileBatchSchema]));
+ export function createZipNode(id, { zipName, includeMetadata }) {
+     return createFlowNode({
+         id,
+         name: "Zip Files",
+         description: "Combines multiple files into a zip archive",
+         type: NodeType.process,
+         inputSchema,
+         outputSchema: flowFileSchema,
+         multiInput: true,
+         run: async ({ data: inputs }) => {
+             if (!inputs) {
+                 throw new Error("No inputs provided to zip node");
+             }
+             const zip = new JSZip();
+             // Process all input files
+             for (const [sourceId, input] of Object.entries(inputs)) {
+                 if (isFlowFile(input)) {
+                     // Handle single FlowFile
+                     const fileName = input.metadata.originalName || `${sourceId}_${input.path}`;
+                     zip.file(fileName, input.inputBytes);
+                     if (includeMetadata) {
+                         zip.file(`${fileName}.meta.json`, JSON.stringify(input.metadata, null, 2));
+                     }
+                 }
+                 else {
+                     // Handle FlowFileBatch
+                     for (const file of input.files) {
+                         const fileName = file.metadata.originalName || `${sourceId}_${file.path}`;
+                         zip.file(fileName, file.inputBytes);
+                         if (includeMetadata) {
+                             zip.file(`${fileName}.meta.json`, JSON.stringify(file.metadata, null, 2));
+                         }
+                     }
+                 }
+             }
+             // Generate the zip file
+             const zipBuffer = await zip.generateAsync({
+                 type: "nodebuffer",
+                 compression: "DEFLATE",
+                 compressionOptions: {
+                     level: 6, // Good balance between speed and compression
+                 },
+             });
+             return {
+                 path: zipName,
+                 inputBytes: new Uint8Array(zipBuffer.buffer, zipBuffer.byteOffset, zipBuffer.byteLength),
+                 metadata: {
+                     mimeType: "application/zip",
+                     size: zipBuffer.length,
+                     originalName: zipName,
+                     extension: "zip",
+                 },
+             };
+         },
+     });
+ }
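
The three-argument Uint8Array construction above is deliberate: a Node Buffer is often a view into a larger pooled ArrayBuffer, so wrapping buf.buffer alone could expose unrelated pool bytes. A standalone illustration (not part of the package):

    import { Buffer } from "node:buffer";

    // Small Buffers share a pooled ArrayBuffer, so buf.buffer can be much
    // larger than buf itself; passing byteOffset and byteLength keeps the
    // view exact without copying.
    const buf = Buffer.from("hello");
    const view = new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength);
    console.log(view.byteLength); // 5, regardless of the pool size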
package/package.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "name": "@uploadista/flow-utility-nodes",
+   "type": "module",
+   "version": "0.0.3",
+   "description": "Utility nodes for Uploadista Flow",
+   "license": "MIT",
+   "author": "Uploadista",
+   "exports": {
+     "./nodes": {
+       "types": "./dist/nodes/index.d.ts",
+       "import": "./dist/nodes/index.js",
+       "default": "./dist/nodes/index.js"
+     },
+     "./types": {
+       "types": "./dist/types/index.d.ts",
+       "import": "./dist/types/index.js",
+       "default": "./dist/types/index.js"
+     }
+   },
+   "dependencies": {
+     "effect": "3.18.4",
+     "zod": "4.1.12",
+     "@uploadista/core": "0.0.3"
+   },
+   "devDependencies": {
+     "@types/node": "24.8.1",
+     "@uploadista/typescript-config": "0.0.3"
+   },
+   "scripts": {
+     "build": "tsc -b",
+     "format": "biome format --write ./src",
+     "lint": "biome lint --write ./src",
+     "check": "biome check --write ./src"
+   }
+ }
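
Given the exports map above, the package exposes only the ./nodes and ./types subpaths; a bare import of the package root would not resolve. Consumption therefore looks like:

    // Only "./nodes" and "./types" are declared in package.json "exports".
    import { createMergeNode, createZipNode } from "@uploadista/flow-utility-nodes/nodes";
    import { mergeParamsSchema, zipParamsSchema } from "@uploadista/flow-utility-nodes/types";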
package/src/nodes/conditional-node.ts ADDED
@@ -0,0 +1,28 @@
+ import {
+   completeNodeExecution,
+   createFlowNode,
+   NodeType,
+ } from "@uploadista/core/flow";
+ import { type UploadFile, uploadFileSchema } from "@uploadista/core/types";
+ import { Effect } from "effect";
+ import type { ConditionalParams } from "@/types/conditional-node";
+
+ export function createConditionalNode(
+   id: string,
+   { field, operator, value }: ConditionalParams,
+ ) {
+   return createFlowNode<UploadFile, UploadFile>({
+     id,
+     name: "Conditional Router",
+     description: `Routes flow based on ${field} ${operator} ${value}`,
+     type: NodeType.conditional,
+     inputSchema: uploadFileSchema,
+     outputSchema: uploadFileSchema,
+     condition: { field, operator, value },
+     run: ({ data }) => {
+       // The actual routing logic is handled by the flow engine
+       // This node just passes through the data
+       return Effect.succeed(completeNodeExecution(data));
+     },
+   });
+ }
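
A minimal wiring sketch for the factory above (illustrative id; not part of the package). The field/operator/value triple matches conditionalParamsSchema from this package, and the routing itself is left to the flow engine, as the comment in run notes.

    import { createConditionalNode } from "@uploadista/flow-utility-nodes/nodes";

    // Pass-through node: the declarative `condition` is what the engine
    // inspects when choosing a branch.
    const routeImages = createConditionalNode("route-images", {
      field: "mimeType",
      operator: "startsWith",
      value: "image/",
    });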
package/src/nodes/index.ts ADDED
@@ -0,0 +1,4 @@
+ export * from "./conditional-node";
+ export * from "./merge-node";
+ export * from "./multiplex-node";
+ export * from "./zip-node";
package/src/nodes/merge-node.ts ADDED
@@ -0,0 +1,111 @@
+ import { UploadistaError } from "@uploadista/core/errors";
+ import {
+   completeNodeExecution,
+   createFlowNode,
+   NodeType,
+ } from "@uploadista/core/flow";
+ import { type UploadFile, uploadFileSchema } from "@uploadista/core/types";
+ import { UploadServer } from "@uploadista/core/upload";
+ import { Effect } from "effect";
+ import { z } from "zod";
+ import type { MergeParams } from "@/types/merge-node";
+
+ const inputSchema = z.record(z.string(), uploadFileSchema);
+ const outputSchema = uploadFileSchema;
+
+ export function createMergeNode(
+   id: string,
+   { strategy, separator: _separator }: MergeParams,
+ ) {
+   return Effect.gen(function* () {
+     const uploadServer = yield* UploadServer;
+
+     return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({
+       id,
+       name: "Merge Files",
+       description: `Merges multiple files using ${strategy} strategy`,
+       type: NodeType.merge,
+       inputSchema,
+       outputSchema,
+       multiInput: true,
+       run: ({ data: inputs, storageId, clientId }) => {
+         return Effect.gen(function* () {
+           if (!inputs || Object.keys(inputs).length === 0) {
+             return yield* Effect.fail(
+               UploadistaError.fromCode("VALIDATION_ERROR", {
+                 body: "No inputs provided to merge node",
+               }),
+             );
+           }
+
+           const inputFiles = Object.values(inputs);
+
+           if (inputFiles.length === 0) {
+             return yield* Effect.fail(
+               UploadistaError.fromCode("VALIDATION_ERROR", {
+                 body: "No files to merge",
+               }),
+             );
+           }
+
+           switch (strategy) {
+             case "concat": {
+               // Read bytes from all input files
+               const inputBytesArray: Uint8Array[] = [];
+               let totalSize = 0;
+
+               for (const file of inputFiles) {
+                 const bytes = yield* uploadServer.read(file.id, clientId);
+                 inputBytesArray.push(bytes);
+                 totalSize += bytes.byteLength;
+               }
+
+               // Concatenate all files into one
+               const mergedBytes = new Uint8Array(totalSize);
+               let offset = 0;
+               for (const bytes of inputBytesArray) {
+                 mergedBytes.set(bytes, offset);
+                 offset += bytes.byteLength;
+               }
+
+               // Create a stream from the merged bytes
+               const stream = new ReadableStream({
+                 start(controller) {
+                   controller.enqueue(mergedBytes);
+                   controller.close();
+                 },
+               });
+
+               // Upload the merged file
+               const result = yield* uploadServer.upload(
+                 {
+                   storageId,
+                   size: mergedBytes.byteLength,
+                   type: "application/octet-stream",
+                   fileName: `merged_${inputFiles.length}_files.bin`,
+                   lastModified: 0,
+                   metadata: JSON.stringify({
+                     mimeType: "application/octet-stream",
+                     originalName: `merged_${inputFiles.length}_files`,
+                     extension: "bin",
+                   }),
+                 },
+                 clientId,
+                 stream,
+               );
+
+               return completeNodeExecution(result);
+             }
+             default: {
+               return yield* Effect.fail(
+                 UploadistaError.fromCode("VALIDATION_ERROR", {
+                   body: `Unknown merge strategy: ${strategy}`,
+                 }),
+               );
+             }
+           }
+         });
+       },
+     });
+   });
+ }
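
Finally, a usage sketch for the source factory above (illustrative, not part of the package). Note that only "concat" is implemented: "batch", the schema default, falls through to the VALIDATION_ERROR branch, and separator is accepted but unused (it is bound as _separator).

    import { Effect } from "effect";
    import { createMergeNode } from "@uploadista/flow-utility-nodes/nodes";

    // Request "concat" explicitly; relying on the "batch" default would
    // produce "Unknown merge strategy: batch" at run time.
    const mergeNode = Effect.gen(function* () {
      return yield* createMergeNode("merge-1", {
        strategy: "concat",
        inputCount: 2,
      });
    });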