@ferchy/n8n-nodes-aimc-toolkit 0.1.8 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -10,6 +10,8 @@ AIMC Toolkit is a community node package for n8n with focused nodes:
 
 n8n is powerful, but real workflows often need basic utilities (validation, parsing, HTTP) and media tasks (convert, compress, merge). I built AIMC Toolkit to remove the busywork and keep workflows short, readable, and fast.
 
+ **Inspiration**: The original idea was sparked by Kenkaii’s SuperCode. I’m grateful for that work and built AIMC Toolkit as my own version, tailored to my needs and expanded with improvements over time.
+
 ## Who This Is For
 
 - Automation builders who want fewer nodes and faster iterations.
@@ -74,6 +76,7 @@ brew install ffmpeg
 - Run once for all items or once per item.
 - Access libraries as globals (`axios`, `_`, `zod`) or via `libs`.
 - Built-in helpers (`utils.now`, `utils.safeJson`, `utils.toArray`).
+ - **AIMC Connect Mode**: optional AI connectors for LLMs, tools, memory, and more.
 
 **Example: normalize data**
 ```javascript
@@ -105,6 +108,20 @@ result = {
 }
 ```
 
+ **AIMC Connect Mode (AI)**
+ Enable **AIMC Connect Mode** to attach AI connectors. When enabled, your code can use:
+
+ - `ai` (object with all connectors)
+ - `aiModel`, `aiTools`, `aiMemory`, `aiVectorStore`, `aiChain`, `aiDocument`, `aiEmbedding`, `aiOutputParser`, `aiTextSplitter`
+
+ Example:
+ ```javascript
+ if (aiModel) {
+   const response = await aiModel.invoke('Summarize this input.');
+   return { summary: response };
+ }
+ ```
+
 ### AIMC Media
 
 **Operations**
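As a small extension of the README example above, here is a hedged sketch of how the new globals might be used together inside AIMC Code; the `.invoke()` call is taken from the README example, while the `connected` bookkeeping and the shape of the returned object are purely illustrative. The hunks below, from the compiled `AimcCode` node source, show how these globals are wired up.

```javascript
// Hedged sketch, not documented API: inspect which connectors are attached
// before using them. `ai` and `aiModel` are the AIMC Connect globals; the
// `.invoke()` call mirrors the README example above.
const connected = Object.keys(ai).filter((key) => ai[key] !== undefined);

if (!aiModel) {
  return { connected, note: 'No language model attached' };
}

const response = await aiModel.invoke('Summarize this input.');
return { connected, summary: response };
```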
@@ -147,6 +147,7 @@ function normalizeResult(result, fallbackItems) {
 ];
 }
 function buildSandbox(params) {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j;
 const cache = {};
 const sandbox = {
 items: params.items,
@@ -161,6 +162,16 @@ function buildSandbox(params) {
 : params.items.map((entry) => entry.json),
 },
 params: params.nodeParams,
+ ai: params.aiContext || {},
+ aiModel: (_a = params.aiContext) === null || _a === void 0 ? void 0 : _a.languageModel,
+ aiTools: (_b = params.aiContext) === null || _b === void 0 ? void 0 : _b.tools,
+ aiMemory: (_c = params.aiContext) === null || _c === void 0 ? void 0 : _c.memory,
+ aiChain: (_d = params.aiContext) === null || _d === void 0 ? void 0 : _d.chain,
+ aiDocument: (_e = params.aiContext) === null || _e === void 0 ? void 0 : _e.document,
+ aiEmbedding: (_f = params.aiContext) === null || _f === void 0 ? void 0 : _f.embedding,
+ aiOutputParser: (_g = params.aiContext) === null || _g === void 0 ? void 0 : _g.outputParser,
+ aiTextSplitter: (_h = params.aiContext) === null || _h === void 0 ? void 0 : _h.textSplitter,
+ aiVectorStore: (_j = params.aiContext) === null || _j === void 0 ? void 0 : _j.vectorStore,
 utils: {
 now: () => new Date().toISOString(),
 sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
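The `(_a = params.aiContext) === null || _a === void 0 ? void 0 : _a.languageModel` chains above are TypeScript's downleveled form of optional chaining. For readability, the new sandbox fields are equivalent to the following sketch; the helper name `aiSandboxFields` is ours, not part of the package.

```javascript
// Approximate source-level shape of the new sandbox fields; the shipped file
// above is the same logic after TypeScript downlevels optional chaining.
function aiSandboxFields(params) {
  return {
    ai: params.aiContext || {},
    aiModel: params.aiContext?.languageModel,
    aiTools: params.aiContext?.tools,
    aiMemory: params.aiContext?.memory,
    aiChain: params.aiContext?.chain,
    aiDocument: params.aiContext?.document,
    aiEmbedding: params.aiContext?.embedding,
    aiOutputParser: params.aiContext?.outputParser,
    aiTextSplitter: params.aiContext?.textSplitter,
    aiVectorStore: params.aiContext?.vectorStore,
  };
}
```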
@@ -294,7 +305,32 @@ class AimcCode {
 defaults: {
 name: 'AIMC Code',
 },
- inputs: ['main'],
+ inputs: `={{
+ ((values, aiMode) => {
+ const connectorTypes = {
+ '${"ai_chain"}': 'Chain',
+ '${"ai_document"}': 'Document',
+ '${"ai_embedding"}': 'Embedding',
+ '${"ai_languageModel"}': 'Language Model',
+ '${"ai_memory"}': 'Memory',
+ '${"ai_outputParser"}': 'Output Parser',
+ '${"ai_textSplitter"}': 'Text Splitter',
+ '${"ai_tool"}': 'Tool',
+ '${"ai_vectorStore"}': 'Vector Store',
+ '${"main"}': 'Main'
+ };
+ const baseInputs = [{ displayName: '', type: '${"main"}' }];
+ if (aiMode && values) {
+ return baseInputs.concat(values.map(value => ({
+ type: value.type,
+ required: value.required,
+ maxConnections: value.maxConnections === -1 ? undefined : value.maxConnections,
+ displayName: connectorTypes[value.type] !== 'Main' ? connectorTypes[value.type] : undefined
+ })));
+ }
+ return baseInputs;
+ })($parameter.aiConnections?.input, $parameter.aiConnectMode)
+ }}`,
 outputs: ['main'],
 properties: [
 {
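To make the dynamic `inputs` expression concrete, the following standalone reproduction runs the same logic against a hypothetical `aiConnections` value (one required Language Model plus an unlimited Tool connection); only the sample configuration and the `buildInputs` name are invented here. The `main` input is always kept first, and `maxConnections: -1` becomes `undefined`, matching the "Set -1 for unlimited connections" description later in the diff.

```javascript
// Standalone reproduction of the dynamic-inputs expression above, fed with a
// hypothetical aiConnections value for illustration.
const connectorTypes = {
  ai_chain: 'Chain',
  ai_document: 'Document',
  ai_embedding: 'Embedding',
  ai_languageModel: 'Language Model',
  ai_memory: 'Memory',
  ai_outputParser: 'Output Parser',
  ai_textSplitter: 'Text Splitter',
  ai_tool: 'Tool',
  ai_vectorStore: 'Vector Store',
  main: 'Main',
};

function buildInputs(values, aiMode) {
  const baseInputs = [{ displayName: '', type: 'main' }];
  if (aiMode && values) {
    return baseInputs.concat(values.map((value) => ({
      type: value.type,
      required: value.required,
      maxConnections: value.maxConnections === -1 ? undefined : value.maxConnections,
      displayName: connectorTypes[value.type] !== 'Main' ? connectorTypes[value.type] : undefined,
    })));
  }
  return baseInputs;
}

// Hypothetical node configuration:
const inputs = buildInputs(
  [
    { type: 'ai_languageModel', required: true, maxConnections: 1 },
    { type: 'ai_tool', required: false, maxConnections: -1 },
  ],
  true,
);
// inputs:
// [
//   { displayName: '', type: 'main' },
//   { type: 'ai_languageModel', required: true, maxConnections: 1, displayName: 'Language Model' },
//   { type: 'ai_tool', required: false, maxConnections: undefined, displayName: 'Tool' },
// ]
```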
@@ -333,6 +369,67 @@ class AimcCode {
 maxValue: 300,
 },
 },
+ {
+ displayName: 'AIMC Connect Mode',
+ name: 'aiConnectMode',
+ type: 'boolean',
+ default: false,
+ description: 'Enable AI connector inputs for models, tools, memory, and more.',
+ },
+ {
+ displayName: 'AI Connections',
+ name: 'aiConnections',
+ placeholder: 'Add AI Connection',
+ type: 'fixedCollection',
+ displayOptions: {
+ show: {
+ aiConnectMode: [true],
+ },
+ },
+ typeOptions: {
+ multipleValues: true,
+ },
+ default: {},
+ options: [
+ {
+ name: 'input',
+ displayName: 'Input',
+ values: [
+ {
+ displayName: 'Type',
+ name: 'type',
+ type: 'options',
+ options: [
+ { name: 'Chain', value: 'ai_chain' },
+ { name: 'Document', value: 'ai_document' },
+ { name: 'Embedding', value: 'ai_embedding' },
+ { name: 'Language Model', value: 'ai_languageModel' },
+ { name: 'Memory', value: 'ai_memory' },
+ { name: 'Output Parser', value: 'ai_outputParser' },
+ { name: 'Text Splitter', value: 'ai_textSplitter' },
+ { name: 'Tool', value: 'ai_tool' },
+ { name: 'Vector Store', value: 'ai_vectorStore' },
+ ],
+ default: 'ai_languageModel',
+ },
+ {
+ displayName: 'Max Connections',
+ name: 'maxConnections',
+ type: 'number',
+ default: 1,
+ description: 'Set -1 for unlimited connections',
+ },
+ {
+ displayName: 'Required',
+ name: 'required',
+ type: 'boolean',
+ default: false,
+ description: 'Whether this connection is required',
+ },
+ ],
+ },
+ ],
+ },
 {
 displayName: 'JavaScript Code',
 name: 'code',
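For reference, a node configured through the `fixedCollection` above stores a parameter value shaped roughly like the sketch below, which is what `$parameter.aiConnections?.input` hands to the `inputs` expression earlier in the diff; the concrete entries are illustrative.

```javascript
// Hypothetical stored parameters for a node with AIMC Connect Mode enabled.
const nodeParameters = {
  aiConnectMode: true,
  aiConnections: {
    input: [
      { type: 'ai_languageModel', maxConnections: 1, required: true },
      { type: 'ai_memory', maxConnections: 1, required: false },
    ],
  },
};

// The inputs expression reads nodeParameters.aiConnections?.input and appends
// one connector input per entry after the base 'main' input.
```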
@@ -410,8 +507,38 @@ class AimcCode {
 const language = this.getNodeParameter('language', 0, 'javascript');
 const mode = this.getNodeParameter('mode', 0, 'runOnceForAllItems');
 const timeoutSeconds = this.getNodeParameter('timeoutSeconds', 0, 30);
+ const aiConnectMode = this.getNodeParameter('aiConnectMode', 0, false);
 const timeoutMs = Math.max(1, timeoutSeconds) * 1000;
 const nodeParams = this.getNode().parameters;
+ const aiContext = {};
+ const fetchAi = async (type) => {
+ if (!aiConnectMode) {
+ return undefined;
+ }
+ const getter = this
+ .getInputConnectionData;
+ if (!getter) {
+ return undefined;
+ }
+ try {
+ const value = await getter(type, 0);
+ return Array.isArray(value) ? value[0] : value;
+ }
+ catch {
+ return undefined;
+ }
+ };
+ if (aiConnectMode) {
+ aiContext.chain = await fetchAi('ai_chain');
+ aiContext.document = await fetchAi('ai_document');
+ aiContext.embedding = await fetchAi('ai_embedding');
+ aiContext.languageModel = await fetchAi('ai_languageModel');
+ aiContext.memory = await fetchAi('ai_memory');
+ aiContext.outputParser = await fetchAi('ai_outputParser');
+ aiContext.textSplitter = await fetchAi('ai_textSplitter');
+ aiContext.tools = await fetchAi('ai_tool');
+ aiContext.vectorStore = await fetchAi('ai_vectorStore');
+ }
 if (language === 'python') {
 const pythonCode = this.getNodeParameter('pythonCode', 0, '');
 if (!pythonCode.trim()) {
477
604
  item,
478
605
  mode,
479
606
  nodeParams,
607
+ aiContext,
480
608
  });
481
609
  const result = await runCode(sandbox);
482
610
  const normalized = normalizeResult(result, [item]);
@@ -488,6 +616,7 @@ class AimcCode {
488
616
  items,
489
617
  mode,
490
618
  nodeParams,
619
+ aiContext,
491
620
  });
492
621
  const result = await runCode(sandbox);
493
622
  const normalized = normalizeResult(result, items);
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ferchy/n8n-nodes-aimc-toolkit",
-  "version": "0.1.8",
+  "version": "0.1.9",
   "description": "AIMC Toolkit nodes for n8n: code execution and media operations.",
   "license": "MIT",
   "author": "Ferchy",