@uploadista/flow-utility-nodes 0.0.20 → 0.1.0
- package/dist/nodes/index.d.cts +1 -1
- package/dist/nodes/index.d.mts +1 -1
- package/dist/nodes/index.mjs.map +1 -1
- package/dist/types/index.d.cts +1 -1
- package/dist/types/index.d.mts +1 -1
- package/dist/{zip-node-7DDOD4H0.d.mts → zip-node-DxxNTUf9.d.mts} +4 -4
- package/dist/{zip-node-7DDOD4H0.d.mts.map → zip-node-DxxNTUf9.d.mts.map} +1 -1
- package/dist/{zip-node-Brh9A17P.d.cts → zip-node-c3Z0LkjZ.d.cts} +4 -4
- package/dist/{zip-node-Brh9A17P.d.cts.map → zip-node-c3Z0LkjZ.d.cts.map} +1 -1
- package/package.json +9 -9
package/dist/nodes/index.d.cts
CHANGED
@@ -1,4 +1,4 @@
-import { a as MergeParams, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-
+import { a as MergeParams, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-c3Z0LkjZ.cjs";
 import * as _uploadista_core_flow2 from "@uploadista/core/flow";
 import { NodeType, ZipPlugin } from "@uploadista/core/flow";
 import * as zod_v4_core0 from "zod/v4/core";
package/dist/nodes/index.d.mts
CHANGED
@@ -1,4 +1,4 @@
-import { a as MergeParams, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-
+import { a as MergeParams, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-DxxNTUf9.mjs";
 import * as _uploadista_core_flow2 from "@uploadista/core/flow";
 import { NodeType, ZipPlugin } from "@uploadista/core/flow";
 import { UploadFile } from "@uploadista/core/types";
package/dist/nodes/index.mjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.mjs","names":["inputSchema","outputSchema","inputBytesArray: Uint8Array[]"],"sources":["../../src/nodes/conditional-node.ts","../../src/nodes/merge-node.ts","../../src/nodes/multiplex-node.ts","../../src/nodes/zip-node.ts"],"sourcesContent":["import {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { Effect } from \"effect\";\nimport type { ConditionalParams } from \"@/types/conditional-node\";\n\nexport function createConditionalNode(\n id: string,\n { field, operator, value }: ConditionalParams,\n) {\n return createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Conditional Router\",\n description: `Routes flow based on ${field} ${operator} ${value}`,\n type: NodeType.conditional,\n nodeTypeId: \"conditional\",\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n condition: { field, operator, value },\n run: ({ data }) => {\n // The actual routing logic is handled by the flow engine\n // This node just passes through the data\n return Effect.succeed(completeNodeExecution(data));\n },\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { MergeParams } from \"@/types/merge-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createMergeNode(\n id: string,\n { strategy, separator: _separator }: MergeParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Merge Files\",\n description: `Merges multiple files using ${strategy} strategy`,\n type: NodeType.merge,\n nodeTypeId: \"merge\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to merge node\",\n }),\n );\n }\n\n const inputFiles = Object.values(inputs);\n\n if (inputFiles.length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No files to merge\",\n }),\n );\n }\n\n switch (strategy) {\n case \"concat\": {\n // Read bytes from all input files\n const inputBytesArray: Uint8Array[] = [];\n let totalSize = 0;\n\n for (const file of inputFiles) {\n const bytes = yield* uploadEngine.read(file.id, clientId);\n inputBytesArray.push(bytes);\n totalSize += bytes.byteLength;\n }\n\n // Concatenate all files into one\n const mergedBytes = new Uint8Array(totalSize);\n let offset = 0;\n for (const bytes of inputBytesArray) {\n mergedBytes.set(bytes, offset);\n offset += bytes.byteLength;\n }\n\n // Create a stream from the merged bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(mergedBytes);\n controller.close();\n },\n });\n\n // Upload the merged file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: mergedBytes.byteLength,\n type: \"application/octet-stream\",\n fileName: `merged_${inputFiles.length}_files.bin`,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/octet-stream\",\n originalName: `merged_${inputFiles.length}_files`,\n extension: \"bin\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n }\n default: {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown merge strategy: ${strategy}`,\n }),\n );\n }\n }\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n resolveUploadMetadata,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport type { MultiplexParams } from \"@/types/multiplex-node\";\n\nexport function createMultiplexNode(\n id: string,\n { outputCount: _outputCount, strategy }: MultiplexParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Multiplex\",\n description: `Multiplexes input using ${strategy} strategy`,\n type: NodeType.multiplex,\n nodeTypeId: \"multiplex\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n multiOutput: true,\n run: ({ data: file, storageId, clientId }) => {\n return Effect.gen(function* () {\n const { type, fileName, metadata, metadataJson } =\n resolveUploadMetadata(file.metadata);\n const normalizedFile = metadata ? { ...file, metadata } : file;\n\n if (strategy === \"copy\") {\n // For copy strategy, read and re-upload the file\n const inputBytes = yield* uploadEngine.read(\n normalizedFile.id,\n clientId,\n );\n\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(inputBytes);\n controller.close();\n },\n });\n\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: inputBytes.byteLength,\n type,\n fileName,\n lastModified: 0,\n metadata: metadataJson,\n },\n clientId,\n stream,\n );\n\n const resolvedResult = resolveUploadMetadata(result.metadata);\n\n return completeNodeExecution(\n resolvedResult.metadata\n ? { ...result, metadata: resolvedResult.metadata }\n : result,\n );\n } else if (strategy === \"split\") {\n // Split strategy is not supported in the new pattern\n // as it would require returning multiple UploadFiles\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"Split strategy is not supported with UploadFile pattern\",\n }),\n );\n }\n\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown multiplex strategy: ${strategy}`,\n }),\n );\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n ZipPlugin,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { ZipParams } from \"@/types/zip-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createZipNode(\n id: string,\n { zipName, includeMetadata }: ZipParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n const zipPlugin = yield* ZipPlugin;\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Zip Files\",\n description: \"Combines multiple files into a zip archive\",\n type: NodeType.process,\n nodeTypeId: \"zip\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to zip node\",\n }),\n );\n }\n\n const zipInputs = yield* Effect.forEach(\n Object.values(inputs),\n (input) =>\n Effect.gen(function* () {\n const data = yield* uploadEngine.read(input.id, clientId);\n return {\n id: input.id,\n data,\n metadata: input.metadata,\n };\n }),\n { concurrency: \"unbounded\" },\n );\n\n const zipBytes = yield* zipPlugin.zip(zipInputs, {\n zipName,\n includeMetadata,\n });\n\n // Create a stream from the zip bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(zipBytes);\n controller.close();\n },\n });\n\n // Upload the zip file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: zipBytes.byteLength,\n type: \"application/zip\",\n fileName: zipName,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/zip\",\n type: \"application/zip\",\n originalName: zipName,\n fileName: zipName,\n extension: \"zip\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n });\n },\n });\n });\n}\n"],"mappings":"yYASA,SAAgB,EACd,EACA,CAAE,QAAO,WAAU,SACnB,CACA,OAAO,EAAuC,CAC5C,KACA,KAAM,qBACN,YAAa,wBAAwB,EAAM,GAAG,EAAS,GAAG,IAC1D,KAAM,EAAS,YACf,WAAY,cACZ,YAAa,EACb,aAAc,EACd,UAAW,CAAE,QAAO,WAAU,QAAO,CACrC,KAAM,CAAE,UAGC,EAAO,QAAQ,EAAsB,EAAK,CAAC,CAErD,CAAC,CCdJ,MAAMA,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpDC,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,WAAU,UAAW,GACvB,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,cACN,YAAa,+BAA+B,EAAS,WACrD,KAAM,EAAS,MACf,WAAY,QACZ,aAAc,EACd,YAAA,EACA,aAAA,EACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,mCACP,CAAC,CACH,CAGH,IAAM,EAAa,OAAO,OAAO,EAAO,CAExC,GAAI,EAAW,SAAW,EACxB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,oBACP,CAAC,CACH,CAGH,OAAQ,EAAR,CACE,IAAK,SAAU,CAEb,IAAMC,EAAgC,EAAE,CACpC,EAAY,EAEhB,IAAK,IAAM,KAAQ,EAAY,CAC7B,IAAM,EAAQ,MAAO,EAAa,KAAK,EAAK,GAAI,EAAS,CACzD,EAAgB,KAAK,EAAM,CAC3B,GAAa,EAAM,WAIrB,IAAM,EAAc,IAAI,WAAW,EAAU,CACzC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAY,IAAI,EAAO,EAAO,CAC9B,GAAU,EAAM,WAIlB,IAAM,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAoBF,OAAO,EAjBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAY,WAClB,KAAM,2BACN,SAAU,UAAU,EAAW,OAAO,YACtC,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,2BACV,aAAc,UAAU,EAAW,OAAO,QAC1C,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,CAEtC,QACE,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,2BAA2B,IAClC,CAAC,CACH,GAGL,CAEL,CAAC,EACF,CCnGJ,SAAgB,EACd,EACA,CAAE,YAAa,EAAc,YAC7B,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuC,CACnD,KACA,KAAM,YACN,YAAa,2BAA2B,EAAS,WACjD,KAAM,EAAS,UACf,WAAY,YACZ,aAAc,EACd,YAAa,EACb,aAAc,EACd,YAAa,GACb,KAAM,CAAE,KAAM,EAAM,YAAW,cACtB,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,OAAM,WAAU,WAAU,gBAChC,EAAsB,EAAK,SAAS,CAChC,EAAiB,EAAW,CAAE,GAAG,EAAM,WAAU,CAAG,EAE1D,GAAI,IAAa,OAAQ,CAEvB,IAAM,EAAa,MAAO,EAAa,KACrC,EAAe,GACf,EACD,CAEK,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAW,CAC9B,EAAW,OAAO,EAErB,CAAC,CAEI,EAAS,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAW,WACjB,OACA,WACA,aAAc,EACd,SAAU,EACX,CACD,EACA,EACD,CAEK,EAAiB,EAAsB,EAAO,SAAS,CAE7D,OAAO,EACL,EAAe,SACX,CAAE,GAAG,EAAQ,SAAU,EAAe,SAAU,CAChD,EACL,SACQ,IAAa,QAGtB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,0DACP,CAAC,CACH,CAGH,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,+BAA+B,IACtC,CAAC,CACH,EACD,CAEL,CAAC,EACF,CC1EJ,MAAM,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpD,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,UAAS,mBACX,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EACtB,EAAY,MAAO,EACzB,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,YACN,YAAa,6CACb,KAAM,EAAS,QACf,WAAY,MACZ,aAAc,EACd,cACA,eACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,iCACP,CAAC,CACH,CAGH,IAAM,EAAY,MAAO,EAAO,QAC9B,OAAO,OAAO,EAAO,CACpB,GACC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAO,MAAO,EAAa,KAAK,EAAM,GAAI,EAAS,CACzD,MAAO,CACL,GAAI,EAAM,GACV,OACA,SAAU,EAAM,SACjB,EACD,CACJ,CAAE,YAAa,YAAa,CAC7B,CAEK,EAAW,MAAO,EAAU,IAAI,EAAW,CAC/C,UACA,kBACD,CAAC,CAGI,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAS,CAC5B,EAAW,OAAO,EAErB,CAAC,CAsBF,OAAO,EAnBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAS,WACf,KAAM,kBACN,SAAU,EACV,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,kBACV,KAAM,kBACN,aAAc,EACd,SAAU,EACV,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,EACpC,CAEL,CAAC,EACF"}
+{"version":3,"file":"index.mjs","names":["inputSchema","outputSchema"],"sources":["../../src/nodes/conditional-node.ts","../../src/nodes/merge-node.ts","../../src/nodes/multiplex-node.ts","../../src/nodes/zip-node.ts"],"sourcesContent":["import {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { Effect } from \"effect\";\nimport type { ConditionalParams } from \"@/types/conditional-node\";\n\nexport function createConditionalNode(\n id: string,\n { field, operator, value }: ConditionalParams,\n) {\n return createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Conditional Router\",\n description: `Routes flow based on ${field} ${operator} ${value}`,\n type: NodeType.conditional,\n nodeTypeId: \"conditional\",\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n condition: { field, operator, value },\n run: ({ data }) => {\n // The actual routing logic is handled by the flow engine\n // This node just passes through the data\n return Effect.succeed(completeNodeExecution(data));\n },\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { MergeParams } from \"@/types/merge-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createMergeNode(\n id: string,\n { strategy, separator: _separator }: MergeParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Merge Files\",\n description: `Merges multiple files using ${strategy} strategy`,\n type: NodeType.merge,\n nodeTypeId: \"merge\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to merge node\",\n }),\n );\n }\n\n const inputFiles = Object.values(inputs);\n\n if (inputFiles.length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No files to merge\",\n }),\n );\n }\n\n switch (strategy) {\n case \"concat\": {\n // Read bytes from all input files\n const inputBytesArray: Uint8Array[] = [];\n let totalSize = 0;\n\n for (const file of inputFiles) {\n const bytes = yield* uploadEngine.read(file.id, clientId);\n inputBytesArray.push(bytes);\n totalSize += bytes.byteLength;\n }\n\n // Concatenate all files into one\n const mergedBytes = new Uint8Array(totalSize);\n let offset = 0;\n for (const bytes of inputBytesArray) {\n mergedBytes.set(bytes, offset);\n offset += bytes.byteLength;\n }\n\n // Create a stream from the merged bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(mergedBytes);\n controller.close();\n },\n });\n\n // Upload the merged file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: mergedBytes.byteLength,\n type: \"application/octet-stream\",\n fileName: `merged_${inputFiles.length}_files.bin`,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/octet-stream\",\n originalName: `merged_${inputFiles.length}_files`,\n extension: \"bin\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n }\n default: {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown merge strategy: ${strategy}`,\n }),\n );\n }\n }\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n resolveUploadMetadata,\n STORAGE_OUTPUT_TYPE_ID,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport type { MultiplexParams } from \"@/types/multiplex-node\";\n\nexport function createMultiplexNode(\n id: string,\n { outputCount: _outputCount, strategy }: MultiplexParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<UploadFile, UploadFile>({\n id,\n name: \"Multiplex\",\n description: `Multiplexes input using ${strategy} strategy`,\n type: NodeType.multiplex,\n nodeTypeId: \"multiplex\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n multiOutput: true,\n run: ({ data: file, storageId, clientId }) => {\n return Effect.gen(function* () {\n const { type, fileName, metadata, metadataJson } =\n resolveUploadMetadata(file.metadata);\n const normalizedFile = metadata ? { ...file, metadata } : file;\n\n if (strategy === \"copy\") {\n // For copy strategy, read and re-upload the file\n const inputBytes = yield* uploadEngine.read(\n normalizedFile.id,\n clientId,\n );\n\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(inputBytes);\n controller.close();\n },\n });\n\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: inputBytes.byteLength,\n type,\n fileName,\n lastModified: 0,\n metadata: metadataJson,\n },\n clientId,\n stream,\n );\n\n const resolvedResult = resolveUploadMetadata(result.metadata);\n\n return completeNodeExecution(\n resolvedResult.metadata\n ? { ...result, metadata: resolvedResult.metadata }\n : result,\n );\n } else if (strategy === \"split\") {\n // Split strategy is not supported in the new pattern\n // as it would require returning multiple UploadFiles\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"Split strategy is not supported with UploadFile pattern\",\n }),\n );\n }\n\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Unknown multiplex strategy: ${strategy}`,\n }),\n );\n });\n },\n });\n });\n}\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n completeNodeExecution,\n createFlowNode,\n NodeType,\n STORAGE_OUTPUT_TYPE_ID,\n ZipPlugin,\n} from \"@uploadista/core/flow\";\nimport { type UploadFile, uploadFileSchema } from \"@uploadista/core/types\";\nimport { UploadEngine } from \"@uploadista/core/upload\";\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { ZipParams } from \"@/types/zip-node\";\n\nconst inputSchema = z.record(z.string(), uploadFileSchema);\nconst outputSchema = uploadFileSchema;\n\nexport function createZipNode(\n id: string,\n { zipName, includeMetadata }: ZipParams,\n) {\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n const zipPlugin = yield* ZipPlugin;\n return yield* createFlowNode<Record<string, UploadFile>, UploadFile>({\n id,\n name: \"Zip Files\",\n description: \"Combines multiple files into a zip archive\",\n type: NodeType.process,\n nodeTypeId: \"zip\",\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n inputSchema,\n outputSchema,\n multiInput: true,\n run: ({ data: inputs, storageId, clientId }) => {\n return Effect.gen(function* () {\n if (!inputs || Object.keys(inputs).length === 0) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: \"No inputs provided to zip node\",\n }),\n );\n }\n\n const zipInputs = yield* Effect.forEach(\n Object.values(inputs),\n (input) =>\n Effect.gen(function* () {\n const data = yield* uploadEngine.read(input.id, clientId);\n return {\n id: input.id,\n data,\n metadata: input.metadata,\n };\n }),\n { concurrency: \"unbounded\" },\n );\n\n const zipBytes = yield* zipPlugin.zip(zipInputs, {\n zipName,\n includeMetadata,\n });\n\n // Create a stream from the zip bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(zipBytes);\n controller.close();\n },\n });\n\n // Upload the zip file\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: zipBytes.byteLength,\n type: \"application/zip\",\n fileName: zipName,\n lastModified: 0,\n metadata: JSON.stringify({\n mimeType: \"application/zip\",\n type: \"application/zip\",\n originalName: zipName,\n fileName: zipName,\n extension: \"zip\",\n }),\n },\n clientId,\n stream,\n );\n\n return completeNodeExecution(result);\n });\n },\n });\n });\n}\n"],"mappings":"yYASA,SAAgB,EACd,EACA,CAAE,QAAO,WAAU,SACnB,CACA,OAAO,EAAuC,CAC5C,KACA,KAAM,qBACN,YAAa,wBAAwB,EAAM,GAAG,EAAS,GAAG,IAC1D,KAAM,EAAS,YACf,WAAY,cACZ,YAAa,EACb,aAAc,EACd,UAAW,CAAE,QAAO,WAAU,QAAO,CACrC,KAAM,CAAE,UAGC,EAAO,QAAQ,EAAsB,EAAK,CAAC,CAErD,CAAC,CCdJ,MAAMA,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpDC,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,WAAU,UAAW,GACvB,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,cACN,YAAa,+BAA+B,EAAS,WACrD,KAAM,EAAS,MACf,WAAY,QACZ,aAAc,EACd,YAAA,EACA,aAAA,EACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,mCACP,CAAC,CACH,CAGH,IAAM,EAAa,OAAO,OAAO,EAAO,CAExC,GAAI,EAAW,SAAW,EACxB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,oBACP,CAAC,CACH,CAGH,OAAQ,EAAR,CACE,IAAK,SAAU,CAEb,IAAM,EAAgC,EAAE,CACpC,EAAY,EAEhB,IAAK,IAAM,KAAQ,EAAY,CAC7B,IAAM,EAAQ,MAAO,EAAa,KAAK,EAAK,GAAI,EAAS,CACzD,EAAgB,KAAK,EAAM,CAC3B,GAAa,EAAM,WAIrB,IAAM,EAAc,IAAI,WAAW,EAAU,CACzC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAY,IAAI,EAAO,EAAO,CAC9B,GAAU,EAAM,WAIlB,IAAM,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAoBF,OAAO,EAjBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAY,WAClB,KAAM,2BACN,SAAU,UAAU,EAAW,OAAO,YACtC,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,2BACV,aAAc,UAAU,EAAW,OAAO,QAC1C,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,CAEtC,QACE,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,2BAA2B,IAClC,CAAC,CACH,GAGL,CAEL,CAAC,EACF,CCnGJ,SAAgB,EACd,EACA,CAAE,YAAa,EAAc,YAC7B,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuC,CACnD,KACA,KAAM,YACN,YAAa,2BAA2B,EAAS,WACjD,KAAM,EAAS,UACf,WAAY,YACZ,aAAc,EACd,YAAa,EACb,aAAc,EACd,YAAa,GACb,KAAM,CAAE,KAAM,EAAM,YAAW,cACtB,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,OAAM,WAAU,WAAU,gBAChC,EAAsB,EAAK,SAAS,CAChC,EAAiB,EAAW,CAAE,GAAG,EAAM,WAAU,CAAG,EAE1D,GAAI,IAAa,OAAQ,CAEvB,IAAM,EAAa,MAAO,EAAa,KACrC,EAAe,GACf,EACD,CAEK,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAW,CAC9B,EAAW,OAAO,EAErB,CAAC,CAEI,EAAS,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAW,WACjB,OACA,WACA,aAAc,EACd,SAAU,EACX,CACD,EACA,EACD,CAEK,EAAiB,EAAsB,EAAO,SAAS,CAE7D,OAAO,EACL,EAAe,SACX,CAAE,GAAG,EAAQ,SAAU,EAAe,SAAU,CAChD,EACL,SACQ,IAAa,QAGtB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,0DACP,CAAC,CACH,CAGH,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,+BAA+B,IACtC,CAAC,CACH,EACD,CAEL,CAAC,EACF,CC1EJ,MAAM,EAAc,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAiB,CACpD,EAAe,EAErB,SAAgB,EACd,EACA,CAAE,UAAS,mBACX,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EACtB,EAAY,MAAO,EACzB,OAAO,MAAO,EAAuD,CACnE,KACA,KAAM,YACN,YAAa,6CACb,KAAM,EAAS,QACf,WAAY,MACZ,aAAc,EACd,cACA,eACA,WAAY,GACZ,KAAM,CAAE,KAAM,EAAQ,YAAW,cACxB,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAW,EAC5C,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,mBAAoB,CAC3C,KAAM,iCACP,CAAC,CACH,CAGH,IAAM,EAAY,MAAO,EAAO,QAC9B,OAAO,OAAO,EAAO,CACpB,GACC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAO,MAAO,EAAa,KAAK,EAAM,GAAI,EAAS,CACzD,MAAO,CACL,GAAI,EAAM,GACV,OACA,SAAU,EAAM,SACjB,EACD,CACJ,CAAE,YAAa,YAAa,CAC7B,CAEK,EAAW,MAAO,EAAU,IAAI,EAAW,CAC/C,UACA,kBACD,CAAC,CAGI,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAS,CAC5B,EAAW,OAAO,EAErB,CAAC,CAsBF,OAAO,EAnBQ,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAS,WACf,KAAM,kBACN,SAAU,EACV,aAAc,EACd,SAAU,KAAK,UAAU,CACvB,SAAU,kBACV,KAAM,kBACN,aAAc,EACd,SAAU,EACV,UAAW,MACZ,CAAC,CACH,CACD,EACA,EACD,CAEmC,EACpC,CAEL,CAAC,EACF"}
package/dist/types/index.d.cts
CHANGED
@@ -1,2 +1,2 @@
-import { a as MergeParams, c as conditionalParamsSchema, i as multiplexParamsSchema, n as zipParamsSchema, o as mergeParamsSchema, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-
+import { a as MergeParams, c as conditionalParamsSchema, i as multiplexParamsSchema, n as zipParamsSchema, o as mergeParamsSchema, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-c3Z0LkjZ.cjs";
 export { ConditionalParams, MergeParams, MultiplexParams, ZipParams, conditionalParamsSchema, mergeParamsSchema, multiplexParamsSchema, zipParamsSchema };
package/dist/types/index.d.mts
CHANGED
@@ -1,2 +1,2 @@
-import { a as MergeParams, c as conditionalParamsSchema, i as multiplexParamsSchema, n as zipParamsSchema, o as mergeParamsSchema, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-
+import { a as MergeParams, c as conditionalParamsSchema, i as multiplexParamsSchema, n as zipParamsSchema, o as mergeParamsSchema, r as MultiplexParams, s as ConditionalParams, t as ZipParams } from "../zip-node-DxxNTUf9.mjs";
 export { ConditionalParams, MergeParams, MultiplexParams, ZipParams, conditionalParamsSchema, mergeParamsSchema, multiplexParamsSchema, zipParamsSchema };
package/dist/{zip-node-7DDOD4H0.d.mts → zip-node-DxxNTUf9.d.mts}
RENAMED
@@ -3,19 +3,19 @@ import { z } from "zod";
 //#region src/types/conditional-node.d.ts
 declare const conditionalParamsSchema: z.ZodObject<{
   field: z.ZodEnum<{
-    mimeType: "mimeType";
     size: "size";
+    mimeType: "mimeType";
     width: "width";
     height: "height";
     extension: "extension";
   }>;
   operator: z.ZodEnum<{
+    startsWith: "startsWith";
     equals: "equals";
     notEquals: "notEquals";
     greaterThan: "greaterThan";
     lessThan: "lessThan";
     contains: "contains";
-    startsWith: "startsWith";
   }>;
   value: z.ZodUnion<readonly [z.ZodString, z.ZodNumber]>;
 }, z.core.$strip>;
@@ -36,8 +36,8 @@ type MergeParams = z.infer<typeof mergeParamsSchema>;
 declare const multiplexParamsSchema: z.ZodObject<{
   outputCount: z.ZodNumber;
   strategy: z.ZodDefault<z.ZodEnum<{
-    copy: "copy";
     split: "split";
+    copy: "copy";
   }>>;
 }, z.core.$strip>;
 type MultiplexParams = z.infer<typeof multiplexParamsSchema>;
@@ -51,4 +51,4 @@ declare const zipParamsSchema: z.ZodObject<{
 type ZipParams = z.infer<typeof zipParamsSchema>;
 //#endregion
 export { MergeParams as a, conditionalParamsSchema as c, multiplexParamsSchema as i, zipParamsSchema as n, mergeParamsSchema as o, MultiplexParams as r, ConditionalParams as s, ZipParams as t };
-//# sourceMappingURL=zip-node-
+//# sourceMappingURL=zip-node-DxxNTUf9.d.mts.map
package/dist/{zip-node-7DDOD4H0.d.mts.map → zip-node-DxxNTUf9.d.mts.map}
RENAMED
@@ -1 +1 @@
-{"version":3,"file":"zip-node-
+{"version":3,"file":"zip-node-DxxNTUf9.d.mts","names":[],"sources":["../src/types/conditional-node.ts","../src/types/merge-node.ts","../src/types/multiplex-node.ts","../src/types/zip-node.ts"],"sourcesContent":[],"mappings":";;;cAEa,yBAAuB,CAAA,CAAA;;IAAvB,IAAA,EAAA,MAAA;;;;;;;IAAuB,UAAA,EAAA,YAAA;IAAA,MAAA,EAAA,QAAA;IAaxB,SAAA,EAAA,WAAiB;;;;ECbhB,CAAA,CAAA;;;KDaD,iBAAA,GAAoB,CAAA,CAAE,aAAa;;;cCblC,mBAAiB,CAAA,CAAA;;IDAjB,MAAA,EAAA,QAAA;;;;;;KCMD,WAAA,GAAc,CAAA,CAAE,aAAa;;;cCN5B,uBAAqB,CAAA,CAAA;;EFArB,QAAA,cAAA,UAWX,CAAA;;;;;KENU,eAAA,GAAkB,CAAA,CAAE,aAAa;;;cCLhC,iBAAe,CAAA,CAAA;;EHAf,eAAA,cAWX,aAAA,CAAA;;;KGLU,SAAA,GAAY,CAAA,CAAE,aAAa"}
package/dist/{zip-node-Brh9A17P.d.cts → zip-node-c3Z0LkjZ.d.cts}
RENAMED
@@ -3,19 +3,19 @@ import { z } from "zod";
 //#region src/types/conditional-node.d.ts
 declare const conditionalParamsSchema: z.ZodObject<{
   field: z.ZodEnum<{
-    mimeType: "mimeType";
     size: "size";
+    mimeType: "mimeType";
     width: "width";
     height: "height";
     extension: "extension";
   }>;
   operator: z.ZodEnum<{
+    startsWith: "startsWith";
     equals: "equals";
     notEquals: "notEquals";
     greaterThan: "greaterThan";
     lessThan: "lessThan";
     contains: "contains";
-    startsWith: "startsWith";
   }>;
   value: z.ZodUnion<readonly [z.ZodString, z.ZodNumber]>;
 }, z.core.$strip>;
@@ -36,8 +36,8 @@ type MergeParams = z.infer<typeof mergeParamsSchema>;
 declare const multiplexParamsSchema: z.ZodObject<{
   outputCount: z.ZodNumber;
   strategy: z.ZodDefault<z.ZodEnum<{
-    copy: "copy";
     split: "split";
+    copy: "copy";
   }>>;
 }, z.core.$strip>;
 type MultiplexParams = z.infer<typeof multiplexParamsSchema>;
@@ -51,4 +51,4 @@ declare const zipParamsSchema: z.ZodObject<{
 type ZipParams = z.infer<typeof zipParamsSchema>;
 //#endregion
 export { MergeParams as a, conditionalParamsSchema as c, multiplexParamsSchema as i, zipParamsSchema as n, mergeParamsSchema as o, MultiplexParams as r, ConditionalParams as s, ZipParams as t };
-//# sourceMappingURL=zip-node-
+//# sourceMappingURL=zip-node-c3Z0LkjZ.d.cts.map
package/dist/{zip-node-Brh9A17P.d.cts.map → zip-node-c3Z0LkjZ.d.cts.map}
RENAMED
@@ -1 +1 @@
-{"version":3,"file":"zip-node-
+{"version":3,"file":"zip-node-c3Z0LkjZ.d.cts","names":[],"sources":["../src/types/conditional-node.ts","../src/types/merge-node.ts","../src/types/multiplex-node.ts","../src/types/zip-node.ts"],"sourcesContent":[],"mappings":";;;cAEa,yBAAuB,CAAA,CAAA;;IAAvB,IAAA,EAAA,MAAA;;;;;;;IAAuB,UAAA,EAAA,YAAA;IAAA,MAAA,EAAA,QAAA;IAaxB,SAAA,EAAA,WAAiB;;;;ECbhB,CAAA,CAAA;;;KDaD,iBAAA,GAAoB,CAAA,CAAE,aAAa;;;cCblC,mBAAiB,CAAA,CAAA;;IDAjB,MAAA,EAAA,QAAA;;;;;;KCMD,WAAA,GAAc,CAAA,CAAE,aAAa;;;cCN5B,uBAAqB,CAAA,CAAA;;EFArB,QAAA,cAAA,UAWX,CAAA;;;;;KENU,eAAA,GAAkB,CAAA,CAAE,aAAa;;;cCLhC,iBAAe,CAAA,CAAA;;EHAf,eAAA,cAWX,aAAA,CAAA;;;KGLU,SAAA,GAAY,CAAA,CAAE,aAAa"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@uploadista/flow-utility-nodes",
   "type": "module",
-  "version": "0.0.20",
+  "version": "0.1.0",
   "description": "Utility nodes for Uploadista Flow",
   "license": "MIT",
   "author": "Uploadista",
@@ -20,7 +20,7 @@
     }
   },
   "dependencies": {
-    "@uploadista/core": "0.0
+    "@uploadista/core": "0.1.0"
   },
   "peerDependencies": {
     "effect": "^3.0.0",
@@ -28,19 +28,19 @@
   },
   "devDependencies": {
     "@effect/vitest": "0.27.0",
-    "@types/node": "24.10.
-    "effect": "3.19.
-    "tsdown": "0.
-    "vitest": "4.0.
-    "zod": "4.
-    "@uploadista/typescript-config": "0.0
+    "@types/node": "24.10.8",
+    "effect": "3.19.14",
+    "tsdown": "0.19.0",
+    "vitest": "4.0.17",
+    "zod": "4.3.5",
+    "@uploadista/typescript-config": "0.1.0"
   },
   "scripts": {
     "build": "tsc --noEmit && tsdown",
     "check": "biome check --write ./src",
     "format": "biome format --write ./src",
     "lint": "biome lint --write ./src",
-    "test": "vitest",
+    "test": "vitest run",
     "test:run": "vitest run",
     "test:watch": "vitest --watch"
   }
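Note: the sourcesContent embedded in index.mjs.map above shows the four factories this package ships (createConditionalNode, createMergeNode, createMultiplexNode, createZipNode); the hashed zip-node-* chunks carry only their shared param types, so the rename is internal and does not change the import paths consumers use. A minimal consumer sketch, assuming the package exposes ./nodes and ./types subpaths matching the dist layout above (the subpath names and the param values are assumptions, not confirmed by this diff):

import { createZipNode } from "@uploadista/flow-utility-nodes/nodes"; // assumed subpath
import type { ZipParams } from "@uploadista/flow-utility-nodes/types"; // assumed subpath

// Hypothetical values; zipName feeds the uploaded file's name and
// includeMetadata is forwarded to ZipPlugin.zip (see sourcesContent above).
const params: ZipParams = { zipName: "bundle.zip", includeMetadata: true };

// createZipNode is effectful: per the embedded source it resolves
// UploadEngine and ZipPlugin from the Effect context, so the returned
// Effect must be provided with both services before it can run.
const zipNodeEffect = createZipNode("zip-1", params);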