@fsai-flow/core 0.0.5 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +17 -0
- package/dist/index.js +61 -0
- package/dist/lib/ActiveWebhooks.d.ts +59 -0
- package/dist/lib/ActiveWebhooks.js +177 -0
- package/dist/lib/ActiveWorkflows.d.ts +87 -0
- package/dist/lib/ActiveWorkflows.js +465 -0
- package/dist/lib/BinaryDataManager/FileSystem.d.ts +26 -0
- package/dist/lib/BinaryDataManager/FileSystem.js +180 -0
- package/dist/lib/BinaryDataManager/index.d.ts +21 -0
- package/dist/lib/BinaryDataManager/index.js +129 -0
- package/dist/lib/ChangeCase.d.ts +9 -0
- package/dist/lib/ChangeCase.js +43 -0
- package/dist/lib/Constants.d.ts +14 -0
- package/dist/lib/Constants.js +18 -0
- package/dist/lib/Credentials.d.ts +27 -0
- package/dist/lib/Credentials.js +88 -0
- package/dist/lib/FileSystem.d.ts +26 -0
- package/dist/lib/FileSystem.js +180 -0
- package/dist/lib/InputConnectionDataLegacy.d.ts +2 -0
- package/dist/lib/InputConnectionDataLegacy.js +72 -0
- package/dist/lib/Interfaces.d.ts +147 -0
- package/dist/lib/Interfaces.js +2 -0
- package/dist/lib/LoadNodeParameterOptions.d.ts +39 -0
- package/dist/lib/LoadNodeParameterOptions.js +152 -0
- package/dist/lib/NodeExecuteFunctions.d.ts +225 -0
- package/dist/lib/NodeExecuteFunctions.js +2467 -0
- package/dist/lib/NodesLoader/constants.d.ts +5 -0
- package/dist/lib/NodesLoader/constants.js +105 -0
- package/dist/lib/NodesLoader/custom-directory-loader.d.ts +9 -0
- package/dist/lib/NodesLoader/custom-directory-loader.js +35 -0
- package/dist/lib/NodesLoader/directory-loader.d.ts +66 -0
- package/dist/lib/NodesLoader/directory-loader.js +367 -0
- package/dist/lib/NodesLoader/index.d.ts +5 -0
- package/dist/lib/NodesLoader/index.js +11 -0
- package/dist/lib/NodesLoader/lazy-package-directory-loader.d.ts +7 -0
- package/dist/lib/NodesLoader/lazy-package-directory-loader.js +44 -0
- package/dist/lib/NodesLoader/load-class-in-isolation.d.ts +1 -0
- package/dist/lib/NodesLoader/load-class-in-isolation.js +17 -0
- package/dist/lib/NodesLoader/package-directory-loader.d.ts +17 -0
- package/dist/lib/NodesLoader/package-directory-loader.js +92 -0
- package/dist/lib/NodesLoader/types.d.ts +14 -0
- package/dist/lib/NodesLoader/types.js +2 -0
- package/dist/lib/RedisLeaderElectionManager.d.ts +53 -0
- package/dist/lib/RedisLeaderElectionManager.js +279 -0
- package/dist/lib/RequestTypes.d.ts +58 -0
- package/dist/lib/RequestTypes.js +8 -0
- package/dist/lib/UserSettings.d.ts +80 -0
- package/dist/lib/UserSettings.js +269 -0
- package/dist/lib/WorkflowExecute.d.ts +53 -0
- package/dist/lib/WorkflowExecute.js +906 -0
- package/dist/lib/index.d.ts +21 -0
- package/dist/lib/index.js +129 -0
- package/dist/utils/crypto.d.ts +1 -0
- package/dist/utils/crypto.js +7 -0
- package/package.json +52 -52
- package/dist/README.md +0 -31
- package/dist/package.json +0 -54
- package/eslint.config.js +0 -19
- package/jest.config.ts +0 -10
- package/project.json +0 -19
- package/src/index.ts +0 -28
- package/src/lib/ActiveWebhooks.ts +0 -245
- package/src/lib/ActiveWorkflows.ts +0 -575
- package/src/lib/BinaryDataManager/FileSystem.ts +0 -214
- package/src/lib/BinaryDataManager/index.ts +0 -187
- package/src/lib/ChangeCase.ts +0 -45
- package/src/lib/Constants.ts +0 -16
- package/src/lib/Credentials.ts +0 -108
- package/src/lib/FileSystem.ts +0 -214
- package/src/lib/InputConnectionDataLegacy.ts +0 -123
- package/src/lib/Interfaces.ts +0 -338
- package/src/lib/LoadNodeParameterOptions.ts +0 -235
- package/src/lib/NodeExecuteFunctions.ts +0 -3700
- package/src/lib/NodesLoader/constants.ts +0 -112
- package/src/lib/NodesLoader/custom-directory-loader.ts +0 -31
- package/src/lib/NodesLoader/directory-loader.ts +0 -458
- package/src/lib/NodesLoader/index.ts +0 -5
- package/src/lib/NodesLoader/lazy-package-directory-loader.ts +0 -55
- package/src/lib/NodesLoader/load-class-in-isolation.ts +0 -19
- package/src/lib/NodesLoader/package-directory-loader.ts +0 -107
- package/src/lib/NodesLoader/types.ts +0 -14
- package/src/lib/RedisLeaderElectionManager.ts +0 -334
- package/src/lib/UserSettings.ts +0 -292
- package/src/lib/WorkflowExecute.ts +0 -1128
- package/src/lib/index.ts +0 -187
- package/src/utils/crypto.ts +0 -5
- package/tests/Credentials.test.ts +0 -88
- package/tests/Helpers.ts +0 -808
- package/tests/WorkflowExecute.test.ts +0 -1242
- package/tsconfig.json +0 -41
- package/tsconfig.lib.json +0 -10
- package/tsconfig.spec.json +0 -14
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { IBinaryData, INodeExecutionData } from "@fsai-flow/workflow";
import type { IBinaryDataConfig } from "../Interfaces";
/**
 * Singleton facade over the configured binary-data storage backends.
 * Payloads are either persisted through a mode-specific manager (the
 * implementation registers a "filesystem" manager) or kept inline on the
 * IBinaryData object, depending on configuration.
 */
export declare class BinaryDataManager {
    private static instance;
    /** Registered storage backends, keyed by mode name. */
    private managers;
    /** Mode used when storing new data. */
    private binaryDataMode;
    /** All modes enabled via configuration (comma-separated in config). */
    private availableModes;
    constructor(config: IBinaryDataConfig);
    /**
     * Creates the singleton and initializes the configured backends.
     * Throws if called more than once. `mainManager` is forwarded to the
     * filesystem backend's init (where it enables background purging).
     */
    static init(config: IBinaryDataConfig, mainManager?: boolean): Promise<void>;
    /** Returns the singleton; throws if init() has not been called. */
    static getInstance(): BinaryDataManager;
    /** Stores the buffer via the active backend, or inlines it on `binaryData`. */
    storeBinaryData(binaryData: IBinaryData, binaryBuffer: Buffer, executionId: string): Promise<IBinaryData>;
    /** Loads a buffer by the item's backend id, or decodes its inline data. */
    retrieveBinaryData(binaryData: IBinaryData): Promise<Buffer>;
    /** Resolves a "mode:fileId" identifier through the matching backend. */
    retrieveBinaryDataByIdentifier(identifier: string): Promise<Buffer>;
    /** Flags an execution's stored data for TTL-based deletion (no-op without a backend). */
    markDataForDeletionByExecutionId(executionId: string): Promise<void>;
    /** Removes the deletion mark so an execution's data is kept (no-op without a backend). */
    persistBinaryDataForExecutionId(executionId: string): Promise<void>;
    /** Immediately deletes all stored data belonging to an execution (no-op without a backend). */
    deleteBinaryDataByExecutionId(executionId: string): Promise<void>;
    /** Duplicates all stored binary payloads referenced by the execution data under a new executionId. */
    duplicateBinaryData(inputData: Array<INodeExecutionData[] | null> | unknown, executionId: string): Promise<INodeExecutionData[][]>;
    private generateBinaryId;
    private splitBinaryModeFileId;
    private duplicateBinaryDataInExecData;
}
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinaryDataManager = void 0;
const Constants_1 = require("../Constants");
const FileSystem_1 = require("./FileSystem");
/**
 * Singleton that routes binary-data operations to the storage backend
 * registered for the configured mode (currently "filesystem"). When no
 * backend handles the active mode, payloads are kept inline on the
 * IBinaryData object as a base64 string.
 */
class BinaryDataManager {
    constructor(config) {
        this.binaryDataMode = config.mode;
        this.availableModes = config.availableModes.split(",");
        this.managers = {};
    }
    /**
     * Creates the singleton and initializes configured backends.
     * @param mainManager forwarded to the filesystem backend's init, where it
     *   enables the recurring TTL purgers.
     * @throws if called more than once.
     */
    static async init(config, mainManager = false) {
        if (BinaryDataManager.instance) {
            throw new Error("Binary Data Manager already initialized");
        }
        BinaryDataManager.instance = new BinaryDataManager(config);
        if (BinaryDataManager.instance.availableModes.includes("filesystem")) {
            BinaryDataManager.instance.managers["filesystem"] =
                new FileSystem_1.BinaryDataFileSystem(config);
            await BinaryDataManager.instance.managers["filesystem"].init(mainManager);
        }
        return undefined;
    }
    /** Returns the singleton; throws if init() has not been called. */
    static getInstance() {
        if (!BinaryDataManager.instance) {
            throw new Error("Binary Data Manager not initialized");
        }
        return BinaryDataManager.instance;
    }
    /**
     * Stores the buffer via the active backend (recording the generated
     * "mode:fileId" on the IBinaryData), or inlines it as base64 when no
     * backend is registered for the active mode.
     */
    async storeBinaryData(binaryData, binaryBuffer, executionId) {
        const retBinaryData = binaryData;
        if (this.managers[this.binaryDataMode]) {
            return this.managers[this.binaryDataMode]
                .storeBinaryData(binaryBuffer, executionId)
                .then((filename) => {
                retBinaryData.id = this.generateBinaryId(filename);
                return retBinaryData;
            });
        }
        retBinaryData.data = binaryBuffer.toString(Constants_1.BINARY_ENCODING);
        return binaryData;
    }
    /** Loads the buffer from the backend (by id) or decodes the inline base64 data. */
    async retrieveBinaryData(binaryData) {
        if (binaryData.id) {
            return this.retrieveBinaryDataByIdentifier(binaryData.id);
        }
        return Buffer.from(binaryData.data, Constants_1.BINARY_ENCODING);
    }
    /** Resolves a "mode:fileId" identifier through the backend that stored it. */
    async retrieveBinaryDataByIdentifier(identifier) {
        const { mode, id } = this.splitBinaryModeFileId(identifier);
        if (this.managers[mode]) {
            return this.managers[mode].retrieveBinaryDataByIdentifier(id);
        }
        throw new Error("Storage mode used to store binary data not available");
    }
    /** Flags an execution's stored data for TTL deletion; no-op without a backend. */
    async markDataForDeletionByExecutionId(executionId) {
        if (this.managers[this.binaryDataMode]) {
            return this.managers[this.binaryDataMode].markDataForDeletionByExecutionId(executionId);
        }
        return Promise.resolve();
    }
    /** Removes the deletion mark so the execution's data is kept; no-op without a backend. */
    async persistBinaryDataForExecutionId(executionId) {
        if (this.managers[this.binaryDataMode]) {
            return this.managers[this.binaryDataMode].persistBinaryDataForExecutionId(executionId);
        }
        return Promise.resolve();
    }
    /** Immediately deletes all stored data of an execution; no-op without a backend. */
    async deleteBinaryDataByExecutionId(executionId) {
        if (this.managers[this.binaryDataMode]) {
            return this.managers[this.binaryDataMode].deleteBinaryDataByExecutionId(executionId);
        }
        return Promise.resolve();
    }
    /**
     * Duplicates every stored binary payload referenced by the execution data,
     * re-keying the copies under the given executionId. Returns the input
     * unchanged when there is no backend for the active mode.
     */
    async duplicateBinaryData(inputData, executionId) {
        if (inputData && this.managers[this.binaryDataMode]) {
            const returnInputData = inputData.map(async (executionDataArray) => {
                if (executionDataArray) {
                    return Promise.all(executionDataArray.map((executionData) => {
                        if (executionData.binary) {
                            return this.duplicateBinaryDataInExecData(executionData, executionId);
                        }
                        return executionData;
                    }));
                }
                return executionDataArray;
            });
            return Promise.all(returnInputData);
        }
        return Promise.resolve(inputData);
    }
    /**
     * Builds the "mode:fileId" identifier for a stored file.
     * BUG FIX: previously returned the literal text "$(unknown)" instead of the
     * filename (the parameter was unused), so every generated identifier was
     * unresolvable by retrieveBinaryDataByIdentifier.
     */
    generateBinaryId(filename) {
        return `${this.binaryDataMode}:${filename}`;
    }
    /** Splits a "mode:fileId" identifier into its parts. */
    splitBinaryModeFileId(fileId) {
        const [mode, id] = fileId.split(":");
        return { mode, id };
    }
    /** Duplicates each stored binary item referenced by one execution-data entry. */
    async duplicateBinaryDataInExecData(executionData, executionId) {
        const binaryManager = this.managers[this.binaryDataMode];
        if (executionData.binary) {
            const binaryDataKeys = Object.keys(executionData.binary);
            const bdPromises = binaryDataKeys.map(async (key) => {
                if (!executionData.binary) {
                    return { key, newId: undefined };
                }
                const binaryDataId = executionData.binary[key].id;
                if (!binaryDataId) {
                    // Inline data (no backend id) needs no duplication.
                    return { key, newId: undefined };
                }
                return binaryManager
                    ?.duplicateBinaryDataByIdentifier(this.splitBinaryModeFileId(binaryDataId).id, executionId)
                    .then((filename) => ({
                    newId: this.generateBinaryId(filename),
                    key,
                }));
            });
            return Promise.all(bdPromises).then((b) => {
                return b.reduce((acc, curr) => {
                    if (acc.binary && curr) {
                        acc.binary[curr.key].id = curr.newId;
                    }
                    return acc;
                }, executionData);
            });
        }
        return executionData;
    }
}
exports.BinaryDataManager = BinaryDataManager;
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/** Static string-case conversion helpers (camel, pascal, snake, capital, param, "no" case). */
export declare class ChangeCase {
    /** Splits on non-alphanumerics and capitalizes each resulting word. */
    private static transformInput;
    /** e.g. "hello world" -> "helloWorld". */
    static toCamelCase(str: string): string;
    /** e.g. "hello world" -> "HelloWorld"; also splits existing camelCase input. */
    static toPascalCase(str: string): string;
    /** e.g. "helloWorld" -> "hello_world". */
    static toSnakeCase(str: string): string;
    /** Uppercases the first letter of each word, e.g. "hello world" -> "Hello World". */
    static toCapitalCase(str: string): string;
    /** e.g. "helloWorld" -> "hello-world". */
    static toParamCase(str: string): string;
    /** Strips punctuation and lowercases, e.g. "Hello, World!" -> "hello world". */
    static noCase(str: string): string;
}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ChangeCase = void 0;
|
|
4
|
+
// biome-ignore lint/complexity/noStaticOnlyClass: Exported as a utility namespace
|
|
5
|
+
class ChangeCase {
|
|
6
|
+
static transformInput(input) {
|
|
7
|
+
return input
|
|
8
|
+
.split(/[^a-zA-Z0-9]+/) // Split by non-alphanumeric characters
|
|
9
|
+
.filter((word) => word.length > 0) // Remove empty segments
|
|
10
|
+
.map((word) => word[0].toUpperCase() + word.slice(1).toLowerCase()); // Capitalize first letter and lowercase the rest
|
|
11
|
+
}
|
|
12
|
+
static toCamelCase(str) {
|
|
13
|
+
const words = ChangeCase.transformInput(str);
|
|
14
|
+
return words[0].toLowerCase() + words.slice(1).join("");
|
|
15
|
+
}
|
|
16
|
+
static toPascalCase(str) {
|
|
17
|
+
const words = str
|
|
18
|
+
.replace(/([a-z0-9])([A-Z])/g, "$1 $2") // insert space before capital letters in camelCase
|
|
19
|
+
.split(/[^a-zA-Z0-9]+|[\s]+/) // split by non-alphanumerics and added spaces
|
|
20
|
+
.filter((word) => word.length > 0)
|
|
21
|
+
.map((word) => word[0].toUpperCase() + word.slice(1).toLowerCase());
|
|
22
|
+
return words.join("");
|
|
23
|
+
}
|
|
24
|
+
static toSnakeCase(str) {
|
|
25
|
+
return str
|
|
26
|
+
.replace(/([a-z])([A-Z])/g, "$1_$2")
|
|
27
|
+
.replace(/[\W_]+/g, "_")
|
|
28
|
+
.toLowerCase();
|
|
29
|
+
}
|
|
30
|
+
static toCapitalCase(str) {
|
|
31
|
+
return str.replace(/\b\w/g, (chr) => chr.toUpperCase());
|
|
32
|
+
}
|
|
33
|
+
static toParamCase(str) {
|
|
34
|
+
return str
|
|
35
|
+
.replace(/([a-z])([A-Z])/g, "$1-$2")
|
|
36
|
+
.replace(/[\W_]+/g, "-")
|
|
37
|
+
.toLowerCase();
|
|
38
|
+
}
|
|
39
|
+
static noCase(str) {
|
|
40
|
+
return str.replace(/[^\w\s]/g, "").toLowerCase();
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
exports.ChangeCase = ChangeCase;
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/** True when NODE_ENV === "production". */
export declare const inProduction: boolean;
/** True when NODE_ENV is unset or "development". */
export declare const inDevelopment: boolean;
/** True when NODE_ENV === "test". */
export declare const inTest: boolean;
/** Encoding used when binary payloads are inlined as strings. */
export declare const BINARY_ENCODING = "base64";
/** Env-var name pointing at additional custom-extension directories. */
export declare const CUSTOM_EXTENSION_ENV = "N8N_CUSTOM_EXTENSIONS";
/** Env-var name that overrides the credentials encryption key. */
export declare const ENCRYPTION_KEY_ENV_OVERWRITE = "FSAI_FLOW_ENCRYPTION_KEY";
/** Subdirectory (inside the user folder) for custom extensions. */
export declare const EXTENSIONS_SUBDIRECTORY = "custom";
/** Env-var name that overrides the user folder location. */
export declare const USER_FOLDER_ENV_OVERWRITE = "N8N_USER_FOLDER";
/** Base name of the user settings file. */
export declare const USER_SETTINGS_FILE_NAME = "config";
/** Subfolder of the home/user directory holding settings. */
export declare const USER_SETTINGS_SUBFOLDER = ".n8n";
// NOTE(review): "__UNKOWN__" looks like a typo for "__UNKNOWN__", but the value
// may already exist in persisted data — confirm before changing it.
export declare const PLACEHOLDER_EMPTY_EXECUTION_ID = "__UNKOWN__";
/** Placeholder id for workflows that have not been saved yet. */
export declare const PLACEHOLDER_EMPTY_WORKFLOW_ID = "__EMPTY__";
/** Env-var name for the tunnel subdomain. */
export declare const TUNNEL_SUBDOMAIN_ENV = "N8N_TUNNEL_SUBDOMAIN";
/** Sentinel "wait forever" timestamp far in the future. */
export declare const WAIT_TIME_UNLIMITED = "3000-01-01T00:00:00.000Z";
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.WAIT_TIME_UNLIMITED = exports.TUNNEL_SUBDOMAIN_ENV = exports.PLACEHOLDER_EMPTY_WORKFLOW_ID = exports.PLACEHOLDER_EMPTY_EXECUTION_ID = exports.USER_SETTINGS_SUBFOLDER = exports.USER_SETTINGS_FILE_NAME = exports.USER_FOLDER_ENV_OVERWRITE = exports.EXTENSIONS_SUBDIRECTORY = exports.ENCRYPTION_KEY_ENV_OVERWRITE = exports.CUSTOM_EXTENSION_ENV = exports.BINARY_ENCODING = exports.inTest = exports.inDevelopment = exports.inProduction = void 0;
// Environment flags, evaluated once at module load from NODE_ENV.
const { NODE_ENV } = process.env;
exports.inProduction = NODE_ENV === "production";
exports.inDevelopment = !NODE_ENV || NODE_ENV === "development";
exports.inTest = NODE_ENV === "test";
// Encoding used when binary payloads are inlined as strings.
exports.BINARY_ENCODING = "base64";
// Environment-variable names used for runtime configuration overrides.
exports.CUSTOM_EXTENSION_ENV = "N8N_CUSTOM_EXTENSIONS";
exports.ENCRYPTION_KEY_ENV_OVERWRITE = "FSAI_FLOW_ENCRYPTION_KEY";
// Filesystem layout of the user settings / extensions folders.
exports.EXTENSIONS_SUBDIRECTORY = "custom";
exports.USER_FOLDER_ENV_OVERWRITE = "N8N_USER_FOLDER";
exports.USER_SETTINGS_FILE_NAME = "config";
exports.USER_SETTINGS_SUBFOLDER = ".n8n";
// NOTE(review): "__UNKOWN__" looks like a typo for "__UNKNOWN__", but this value
// may already appear in persisted data — confirm before changing it.
exports.PLACEHOLDER_EMPTY_EXECUTION_ID = "__UNKOWN__";
exports.PLACEHOLDER_EMPTY_WORKFLOW_ID = "__EMPTY__";
exports.TUNNEL_SUBDOMAIN_ENV = "N8N_TUNNEL_SUBDOMAIN";
// Sentinel "wait forever" timestamp far in the future.
exports.WAIT_TIME_UNLIMITED = "3000-01-01T00:00:00.000Z";
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { type CredentialInformation, type ICredentialDataDecryptedObject, ICredentials, type ICredentialsEncrypted } from "@fsai-flow/workflow";
/**
 * Credentials container that keeps its data encrypted at rest and only
 * decrypts on access with the caller-supplied encryption key.
 */
export declare class Credentials extends ICredentials {
    /**
     * Returns if the given nodeType has access to data
     */
    hasNodeAccess(nodeType: string): boolean;
    /**
     * Sets new credential object (stored encrypted with the given key)
     */
    setData(data: ICredentialDataDecryptedObject, encryptionKey: string): void;
    /**
     * Sets new credentials for given key, preserving the other keys
     */
    setDataKey(key: string, data: CredentialInformation, encryptionKey: string): void;
    /**
     * Returns the decrypted credential object. Throws when no data is set,
     * when decryption fails, or when nodeType is given but has no access.
     */
    getData(encryptionKey: string, nodeType?: string): ICredentialDataDecryptedObject;
    /**
     * Returns the decrypted credentials for given key; throws if the key is absent
     */
    getDataKey(key: string, encryptionKey: string, nodeType?: string): CredentialInformation;
    /**
     * Returns the encrypted credentials to be saved; throws if no data is set
     */
    getDataToSave(): ICredentialsEncrypted;
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Credentials = void 0;
const workflow_1 = require("@fsai-flow/workflow");
const crypto_js_1 = require("crypto-js");
/**
 * ICredentials implementation that stores credential data AES-encrypted
 * (via crypto-js) and exposes typed access with per-node access control.
 */
class Credentials extends workflow_1.ICredentials {
    /**
     * Returns if the given nodeType has access to data
     */
    hasNodeAccess(nodeType) {
        return this.nodesAccess.some((accessData) => accessData.nodeType === nodeType);
    }
    /**
     * Sets new credential object
     */
    setData(data, encryptionKey) {
        const serialized = JSON.stringify(data);
        this.data = crypto_js_1.AES.encrypt(serialized, encryptionKey).toString();
    }
    /**
     * Sets new credentials for given key
     */
    setDataKey(key, data, encryptionKey) {
        let fullData;
        try {
            fullData = this.getData(encryptionKey);
        }
        catch (_error) {
            // No decryptable data yet — start from an empty object.
            fullData = {};
        }
        fullData[key] = data;
        this.setData(fullData, encryptionKey);
    }
    /**
     * Returns the decrypted credential object
     */
    getData(encryptionKey, nodeType) {
        if (nodeType && !this.hasNodeAccess(nodeType)) {
            throw new Error(`The node of type "${nodeType}" does not have access to credentials "${this.name}" of type "${this.type}".`);
        }
        if (this.data === undefined) {
            throw new Error("No data is set so nothing can be returned.");
        }
        const decrypted = crypto_js_1.AES.decrypt(this.data, encryptionKey);
        try {
            // A wrong key yields garbage bytes, which fail UTF-8/JSON parsing below.
            return JSON.parse(decrypted.toString(crypto_js_1.enc.Utf8));
        }
        catch (_error) {
            throw new Error('Credentials could not be decrypted. The likely reason is that a different "encryptionKey" was used to encrypt the data.');
        }
    }
    /**
     * Returns the decrypted credentials for given key
     */
    getDataKey(key, encryptionKey, nodeType) {
        const fullData = this.getData(encryptionKey, nodeType);
        if (fullData === null) {
            throw new Error("No data was set.");
        }
        if (!Object.hasOwn(fullData, key)) {
            throw new Error(`No data for key "${key}" exists.`);
        }
        return fullData[key];
    }
    /**
     * Returns the encrypted credentials to be saved
     */
    getDataToSave() {
        if (this.data === undefined) {
            throw new Error("No credentials were set to save.");
        }
        const { id, name, type, data, nodesAccess } = this;
        return { id, name, type, data, nodesAccess };
    }
}
exports.Credentials = Credentials;
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { IBinaryDataConfig, IBinaryDataManager } from "./Interfaces";
/**
 * Filesystem-backed binary-data store. Files live in `storagePath` named
 * "<executionId>_<uuid>"; TTL bookkeeping uses marker files in "meta"
 * and "persistMeta" subdirectories.
 */
export declare class BinaryDataFileSystem implements IBinaryDataManager {
    /** Root directory for stored binary files (from config.localStoragePath). */
    private storagePath;
    /** TTL for marked data, used to schedule deletion (from config). */
    private binaryDataTTL;
    /** TTL for persisted data (from config). */
    private persistedBinaryDataTTL;
    constructor(config: IBinaryDataConfig);
    /** Creates storage/meta directories, purges expired files; `startPurger` schedules recurring purges. */
    init(startPurger?: boolean): Promise<void>;
    /** Writes the buffer to disk and returns the generated file id. */
    storeBinaryData(binaryBuffer: Buffer, executionId: string): Promise<string>;
    /** Reads a stored file back; throws if it cannot be found. */
    retrieveBinaryDataByIdentifier(identifier: string): Promise<Buffer>;
    /** Writes a TTL marker so the execution's files get purged later. */
    markDataForDeletionByExecutionId(executionId: string): Promise<void>;
    /** Deletes all expired marked files (regular meta). */
    deleteMarkedFiles(): Promise<void>;
    /** Deletes all expired marked files (persisted meta). */
    deleteMarkedPersistedFiles(): Promise<void>;
    private addBinaryIdToPersistMeta;
    private deleteMarkedFilesByMeta;
    /** Copies a stored file under a new id prefixed with `prefix`. */
    duplicateBinaryDataByIdentifier(binaryDataId: string, prefix: string): Promise<string>;
    /** Removes every stored file belonging to the given execution. */
    deleteBinaryDataByExecutionId(executionId: string): Promise<void>;
    /** Removes a single stored file. */
    deleteBinaryDataByIdentifier(identifier: string): Promise<void>;
    /** Removes the persist-markers for the given execution. */
    persistBinaryDataForExecutionId(executionId: string): Promise<void>;
    private generateFileName;
    private getBinaryDataMetaPath;
    private getBinaryDataPersistMetaPath;
    private deleteMetaFileByPath;
    private deleteFromLocalStorage;
    private saveToLocalStorage;
    private retrieveFromLocalStorage;
}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinaryDataFileSystem = void 0;
const node_fs_1 = require("node:fs");
const path = __importStar(require("node:path"));
const uuid_1 = require("uuid");
// Prefixes distinguishing the two kinds of TTL marker files.
const PREFIX_METAFILE = "binarymeta";
const PREFIX_PERSISTED_METAFILE = "persistedmeta";
/**
 * Filesystem-backed binary-data store. Files are named
 * "<executionId>_<uuid>" inside storagePath; TTL bookkeeping is done via
 * marker files in the "meta" and "persistMeta" subdirectories whose names
 * encode the execution id and an expiry timestamp.
 */
class BinaryDataFileSystem {
    constructor(config) {
        this.storagePath = config.localStoragePath;
        this.binaryDataTTL = config.binaryDataTTL;
        this.persistedBinaryDataTTL = config.persistedBinaryDataTTL;
    }
    /**
     * Ensures the storage and meta directories exist, then purges any already
     * expired files. When `startPurger` is true, recurring purges are scheduled
     * (TTL * 30000 ms, i.e. half the TTL interval given TTLs in minutes).
     */
    async init(startPurger = false) {
        if (startPurger) {
            setInterval(async () => {
                await this.deleteMarkedFiles();
            }, this.binaryDataTTL * 30000);
            setInterval(async () => {
                await this.deleteMarkedPersistedFiles();
            }, this.persistedBinaryDataTTL * 30000);
        }
        // readdir acting as an existence probe; mkdir on failure.
        return node_fs_1.promises
            .readdir(this.storagePath)
            .catch(async () => node_fs_1.promises.mkdir(this.storagePath, { recursive: true }))
            .then(async () => node_fs_1.promises.readdir(this.getBinaryDataMetaPath()))
            .catch(async () => node_fs_1.promises.mkdir(this.getBinaryDataMetaPath(), { recursive: true }))
            .then(async () => node_fs_1.promises.readdir(this.getBinaryDataPersistMetaPath()))
            .catch(async () => node_fs_1.promises.mkdir(this.getBinaryDataPersistMetaPath(), { recursive: true }))
            .then(async () => this.deleteMarkedFiles())
            .then(async () => this.deleteMarkedPersistedFiles())
            .then(() => { });
    }
    /** Writes the buffer to disk, records a persist-marker, and returns the file id. */
    async storeBinaryData(binaryBuffer, executionId) {
        const binaryDataId = this.generateFileName(executionId);
        return this.addBinaryIdToPersistMeta(executionId, binaryDataId).then(async () => this.saveToLocalStorage(binaryBuffer, binaryDataId).then(() => binaryDataId));
    }
    /** Reads a stored file back; throws if it cannot be found. */
    async retrieveBinaryDataByIdentifier(identifier) {
        return this.retrieveFromLocalStorage(identifier);
    }
    /** Writes a TTL marker so the execution's files get purged after binaryDataTTL minutes. */
    async markDataForDeletionByExecutionId(executionId) {
        const tt = new Date(Date.now() + this.binaryDataTTL * 60000);
        return node_fs_1.promises.writeFile(path.join(this.getBinaryDataMetaPath(), `${PREFIX_METAFILE}_${executionId}_${tt.valueOf()}`), "");
    }
    /** Purges expired regular-marker files and their data. */
    async deleteMarkedFiles() {
        return this.deleteMarkedFilesByMeta(this.getBinaryDataMetaPath(), PREFIX_METAFILE);
    }
    /** Purges expired persist-marker files and their data. */
    async deleteMarkedPersistedFiles() {
        return this.deleteMarkedFilesByMeta(this.getBinaryDataPersistMetaPath(), PREFIX_PERSISTED_METAFILE);
    }
    /** Records a persist-marker (one per execution per hour bucket) holding the file id. */
    async addBinaryIdToPersistMeta(executionId, identifier) {
        const currentTime = Date.now();
        // Round up to the next full hour so all of an execution's files within the
        // same hour share one marker file.
        const timeAtNextHour = currentTime + 3600000 - (currentTime % 3600000);
        const timeoutTime = timeAtNextHour + this.persistedBinaryDataTTL * 60000;
        const filePath = path.join(this.getBinaryDataPersistMetaPath(), `${PREFIX_PERSISTED_METAFILE}_${executionId}_${timeoutTime}`);
        // readFile as an existence check: only write the marker if absent.
        return node_fs_1.promises
            .readFile(filePath)
            .catch(async () => node_fs_1.promises.writeFile(filePath, identifier))
            .then(() => { });
    }
    /**
     * Deletes data (once per execution) and marker files whose encoded expiry
     * timestamp has passed.
     * NOTE(review): marker names are split on "_"; executionIds containing "_"
     * would mis-parse here — confirm the id format upstream.
     */
    async deleteMarkedFilesByMeta(metaPath, filePrefix) {
        const currentTimeValue = Date.now();
        const metaFileNames = await node_fs_1.promises.readdir(metaPath);
        const execsAdded = {};
        const proms = metaFileNames.reduce((prev, curr) => {
            const [prefix, executionId, ts] = curr.split("_");
            if (prefix !== filePrefix) {
                return prev;
            }
            const execTimestamp = Number.parseInt(ts, 10);
            if (execTimestamp < currentTimeValue) {
                if (execsAdded[executionId]) {
                    // do not delete data, only meta file
                    prev.push(this.deleteMetaFileByPath(path.join(metaPath, curr)));
                    return prev;
                }
                execsAdded[executionId] = 1;
                prev.push(this.deleteBinaryDataByExecutionId(executionId).then(async () => this.deleteMetaFileByPath(path.join(metaPath, curr))));
            }
            return prev;
        }, [Promise.resolve()]);
        return Promise.all(proms).then(() => { });
    }
    /** Copies a stored file under a new id generated from `prefix`, returning the new id. */
    async duplicateBinaryDataByIdentifier(binaryDataId, prefix) {
        const newBinaryDataId = this.generateFileName(prefix);
        return node_fs_1.promises
            .copyFile(path.join(this.storagePath, binaryDataId), path.join(this.storagePath, newBinaryDataId))
            .then(() => newBinaryDataId);
    }
    /** Removes every stored file belonging to the given execution. */
    async deleteBinaryDataByExecutionId(executionId) {
        // BUG FIX: previously matched filenames with `new RegExp(`${executionId}_*`)`,
        // which is unanchored and treats `_*` as "zero or more underscores" — so
        // executionId "12" also matched (and deleted) files of execution "123",
        // and regex metacharacters in the id were interpreted. Filenames are
        // "<executionId>_<uuid>", so an exact prefix check is correct.
        const prefix = `${executionId}_`;
        const filenames = await node_fs_1.promises.readdir(path.join(this.storagePath));
        const proms = filenames.reduce((allProms, filename) => {
            if (filename.startsWith(prefix)) {
                allProms.push(node_fs_1.promises.rm(path.join(this.storagePath, filename)));
            }
            return allProms;
        }, [Promise.resolve()]);
        return Promise.all(proms).then(async () => Promise.resolve());
    }
    /** Removes a single stored file. */
    async deleteBinaryDataByIdentifier(identifier) {
        return this.deleteFromLocalStorage(identifier);
    }
    /** Removes the persist-markers for the given execution so its data is kept. */
    async persistBinaryDataForExecutionId(executionId) {
        return node_fs_1.promises
            .readdir(this.getBinaryDataPersistMetaPath())
            .then(async (metafiles) => {
            const proms = metafiles.reduce((prev, curr) => {
                if (curr.startsWith(`${PREFIX_PERSISTED_METAFILE}_${executionId}_`)) {
                    prev.push(node_fs_1.promises.rm(path.join(this.getBinaryDataPersistMetaPath(), curr)));
                    return prev;
                }
                return prev;
            }, [Promise.resolve()]);
            return Promise.all(proms).then(() => { });
        });
    }
    /** Builds a unique file id "<prefix>_<uuid>". */
    generateFileName(prefix) {
        return `${prefix}_${(0, uuid_1.v4)()}`;
    }
    getBinaryDataMetaPath() {
        return path.join(this.storagePath, "meta");
    }
    getBinaryDataPersistMetaPath() {
        return path.join(this.storagePath, "persistMeta");
    }
    async deleteMetaFileByPath(metafilePath) {
        return node_fs_1.promises.rm(metafilePath);
    }
    async deleteFromLocalStorage(identifier) {
        return node_fs_1.promises.rm(path.join(this.storagePath, identifier));
    }
    async saveToLocalStorage(data, identifier) {
        await node_fs_1.promises.writeFile(path.join(this.storagePath, identifier), data);
    }
    /** Reads a stored file, translating any fs error into a uniform not-found error. */
    async retrieveFromLocalStorage(identifier) {
        const filePath = path.join(this.storagePath, identifier);
        try {
            return await node_fs_1.promises.readFile(filePath);
        }
        catch (_e) {
            throw new Error(`Error finding file: ${filePath}`);
        }
    }
}
exports.BinaryDataFileSystem = BinaryDataFileSystem;
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
import { type INodeType, type ISupplyDataFunctions, type IWebhookFunctions, type NodeConnectionType, type Workflow } from "@fsai-flow/workflow";
/**
 * Resolves the `supplyData` responses of all enabled sub-nodes connected to the
 * given input type of the current node. For single-connection inputs
 * (maxConnections === 1) a single response (or undefined) is returned;
 * otherwise an array of responses. Throws when the input type is unknown,
 * a required input has no connections, or the connection limit is exceeded.
 */
export declare function getInputConnectionDataLegacy(this: ISupplyDataFunctions | IWebhookFunctions, workflow: Workflow, connectionType: NodeConnectionType, itemIndex: number, nodeTypeData: INodeType): Promise<unknown>;
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getInputConnectionDataLegacy = getInputConnectionDataLegacy;
const workflow_1 = require("@fsai-flow/workflow");
/**
 * Resolves the `supplyData` responses of all enabled sub-nodes connected to the
 * given input type of the current node (bound as `this`). For inputs with
 * maxConnections === 1 a single response (or undefined) is returned; otherwise
 * an array of responses.
 * @throws NodeOperationError when the input type is unknown, a required input
 *   has no connections, the connection limit is exceeded, a connected node has
 *   no supplyData, or a sub-node's supplyData fails.
 */
async function getInputConnectionDataLegacy(workflow, connectionType, itemIndex, nodeTypeData) {
    const parentNode = this.getNode();
    // Normalize string shorthands into the object form ({ type, ... }).
    const nodeInputs = getNodeInputs(workflow, parentNode, nodeTypeData.description).map((input) => typeof input === "string" ? { type: input } : input);
    const inputConfiguration = nodeInputs.find((input) => input.type === connectionType);
    if (inputConfiguration === undefined) {
        throw new workflow_1.NodeOperationError(parentNode, "Node does not have input of type", {
            description: `Node ${parentNode.name} does not have input of type ${connectionType}`,
        });
    }
    const connectedNodes = getConnectedNodes(workflow, parentNode, connectionType);
    if (connectedNodes.length === 0) {
        if (inputConfiguration.required) {
            throw new workflow_1.NodeOperationError(parentNode, `A ${inputConfiguration?.displayName ?? connectionType} sub-node must be connected and enabled`);
        }
        // Single-connection inputs resolve to undefined; multi-connection to [].
        return inputConfiguration.maxConnections === 1 ? undefined : [];
    }
    if (inputConfiguration.maxConnections !== undefined &&
        connectedNodes.length > inputConfiguration.maxConnections) {
        throw new workflow_1.NodeOperationError(parentNode, `Only ${inputConfiguration.maxConnections} ${connectionType} sub-nodes are/is allowed to be connected`);
    }
    const nodes = [];
    for (const connectedNode of connectedNodes) {
        const connectedNodeType = workflow.nodeTypes.getByNameAndVersion(connectedNode.type, connectedNode.typeVersion);
        if (!connectedNodeType) {
            continue;
        }
        // CLEANUP: dropped the redundant `connectedNodeType &&` re-check — the
        // `continue` above already guarantees it is truthy.
        if (!connectedNodeType.supplyData) {
            throw new workflow_1.NodeOperationError(connectedNode, "Node does not have a `supplyData` method defined", {
                itemIndex,
            });
        }
        try {
            const supplyData = await connectedNodeType.supplyData?.call(this, itemIndex);
            if (supplyData) {
                nodes.push(supplyData);
            }
        }
        catch (error) {
            throw new workflow_1.NodeOperationError(connectedNode, `Error in sub-node ${connectedNode.name}`, {
                itemIndex,
                description: error instanceof Error ? error.message : "Unknown error",
            });
        }
    }
    // CLEANUP: `nodes` is always an array, so the former `(nodes || [])` fallback was dead code.
    return inputConfiguration.maxConnections === 1
        ? nodes[0]?.response
        : nodes.map((node) => node.response);
}
/** Resolves a node's inputs, evaluating expression-based definitions when needed. */
function getNodeInputs(workflow, node, nodeTypeData) {
    if (Array.isArray(nodeTypeData?.inputs)) {
        return nodeTypeData.inputs;
    }
    // Calculate the inputs dynamically (original comment said "outputs" — it computes inputs)
    try {
        return (workflow.expression.getSimpleParameterValue(node, nodeTypeData.inputs, "internal", {}) || []);
    }
    catch (_e) {
        console.warn("Could not calculate inputs dynamically for node: ", node.name);
        return [];
    }
}
/** Returns the enabled, directly connected parent nodes for the given connection type. */
function getConnectedNodes(workflow, node, connectionType) {
    return workflow
        .getParentNodes(node.name, connectionType, 1)
        .map((nodeName) => workflow.getNode(nodeName))
        // CLEANUP: collapsed two back-to-back null filters into one; behavior unchanged.
        .filter((node) => !!node && node.disabled !== true);
}
|