@cascadeflow/n8n-nodes-cascadeflow 0.4.8 → 0.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.d.ts +6 -0
- package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.d.ts.map +1 -0
- package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.js +124 -0
- package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.js.map +1 -0
- package/dist/nodes/LmChatCascadeFlow/cascadeflow.svg +15 -0
- package/package.json +5 -3
package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.d.ts
ADDED
@@ -0,0 +1,6 @@
+import type { INodeType, INodeTypeDescription, ISupplyDataFunctions, SupplyData } from 'n8n-workflow';
+export declare class LmChatCascadeFlow implements INodeType {
+    description: INodeTypeDescription;
+    supplyData(this: ISupplyDataFunctions): Promise<SupplyData>;
+}
+//# sourceMappingURL=LmChatCascadeFlow.node.d.ts.map
package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"LmChatCascadeFlow.node.d.ts","sourceRoot":"","sources":["../../../nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,SAAS,EACT,oBAAoB,EACpB,oBAAoB,EACpB,UAAU,EACX,MAAM,cAAc,CAAC;AA+DtB,qBAAa,iBAAkB,YAAW,SAAS;IACjD,WAAW,EAAE,oBAAoB,CAyD/B;IAEI,UAAU,CAAC,IAAI,EAAE,oBAAoB,GAAG,OAAO,CAAC,UAAU,CAAC;CAwClE"}
package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.js
ADDED
@@ -0,0 +1,124 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LmChatCascadeFlow = void 0;
+const n8n_workflow_1 = require("n8n-workflow");
+const chat_models_1 = require("@langchain/core/language_models/chat_models");
+/**
+ * Custom CascadeChatModel that wraps two models (drafter and verifier)
+ * and implements cascading logic
+ */
+class CascadeChatModel extends chat_models_1.BaseChatModel {
+    constructor(drafterModel, verifierModel, qualityThreshold = 0.7) {
+        super({});
+        this.drafterModel = drafterModel;
+        this.verifierModel = verifierModel;
+        this.qualityThreshold = qualityThreshold;
+    }
+    _llmType() {
+        return 'cascade';
+    }
+    async _generate(messages, options, runManager) {
+        try {
+            // Step 1: Try the drafter model
+            const drafterResult = await this.drafterModel._generate(messages, options, runManager);
+            const drafterMessage = drafterResult.generations[0].message;
+            // Step 2: Simple quality check (can be enhanced)
+            // For now, we'll just check if the response is substantive
+            const responseText = drafterMessage.content.toString();
+            const qualityScore = Math.min(responseText.length / 100, 1.0); // Simple heuristic
+            // Step 3: If quality is sufficient, return drafter response
+            if (qualityScore >= this.qualityThreshold) {
+                return drafterResult;
+            }
+            // Step 4: Otherwise, escalate to verifier
+            const verifierResult = await this.verifierModel._generate(messages, options, runManager);
+            return verifierResult;
+        }
+        catch (error) {
+            // Fallback to verifier on error
+            return await this.verifierModel._generate(messages, options, runManager);
+        }
+    }
+}
+class LmChatCascadeFlow {
+    constructor() {
+        this.description = {
+            displayName: 'CascadeFlow Chat Model',
+            name: 'lmChatCascadeFlow',
+            icon: 'file:cascadeflow.svg',
+            group: ['transform'],
+            version: 1,
+            description: 'Smart AI model cascading with 40-85% cost savings. Connects two chat models (drafter and verifier) and intelligently cascades between them.',
+            defaults: {
+                name: 'CascadeFlow Chat Model',
+            },
+            codex: {
+                categories: ['AI'],
+                subcategories: {
+                    AI: ['Language Models', 'Chat Models'],
+                },
+                resources: {
+                    primaryDocumentation: [
+                        {
+                            url: 'https://github.com/lemony-ai/cascadeflow',
+                        },
+                    ],
+                },
+            },
+            // Sub-node: no regular inputs, takes AI model connections
+            // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
+            inputs: [
+                {
+                    displayName: 'Drafter Model',
+                    type: 'ai_languageModel',
+                    maxConnections: 1,
+                    required: true,
+                },
+                {
+                    displayName: 'Verifier Model',
+                    type: 'ai_languageModel',
+                    maxConnections: 1,
+                    required: true,
+                },
+            ],
+            // Outputs an AI model that can be connected to Agent
+            // eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
+            outputs: ['ai_languageModel'],
+            outputNames: ['Model'],
+            properties: [
+                {
+                    displayName: 'Quality Threshold',
+                    name: 'qualityThreshold',
+                    type: 'number',
+                    default: 0.7,
+                    typeOptions: {
+                        minValue: 0,
+                        maxValue: 1,
+                        numberPrecision: 2,
+                    },
+                    description: 'Minimum quality score (0-1) to accept drafter response. Lower = more cost savings, higher = better quality.',
+                },
+            ],
+        };
+    }
+    async supplyData() {
+        // Get the quality threshold parameter
+        const qualityThreshold = this.getNodeParameter('qualityThreshold', 0, 0.7);
+        // Get the connected chat models from inputs
+        const drafterModel = (await this.getInputConnectionData('ai_languageModel', 0));
+        const verifierModel = (await this.getInputConnectionData('ai_languageModel', 1));
+        if (!drafterModel) {
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'Drafter model is required. Please connect an AI chat model to the "Drafter Model" input.');
+        }
+        if (!verifierModel) {
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'Verifier model is required. Please connect an AI chat model to the "Verifier Model" input.');
+        }
+        // Create and return the cascade model
+        const cascadeModel = new CascadeChatModel(drafterModel, verifierModel, qualityThreshold);
+        return {
+            response: cascadeModel,
+        };
+    }
+}
+exports.LmChatCascadeFlow = LmChatCascadeFlow;
+//# sourceMappingURL=LmChatCascadeFlow.node.js.map
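A note on the cascading logic added above: the drafter's reply is scored with a simple length heuristic, Math.min(responseText.length / 100, 1.0), so with the default qualityThreshold of 0.7 any drafter response shorter than roughly 70 characters escalates to the verifier. A minimal standalone TypeScript sketch of that decision path (independent of n8n and LangChain; the Generate callbacks are hypothetical stand-ins for the two connected chat models):

// Sketch of the cascade decision implemented in LmChatCascadeFlow.node.js above.
// `draft` and `verify` are hypothetical stand-ins for the drafter/verifier models.
type Generate = (prompt: string) => Promise<string>;

async function cascade(
  prompt: string,
  draft: Generate,
  verify: Generate,
  qualityThreshold = 0.7, // mirrors the node's "Quality Threshold" default
): Promise<string> {
  try {
    const draftText = await draft(prompt);
    // Same length-based heuristic as the compiled node: 100+ characters scores 1.0.
    const qualityScore = Math.min(draftText.length / 100, 1.0);
    if (qualityScore >= qualityThreshold) {
      return draftText; // accept the cheaper drafter answer
    }
    return await verify(prompt); // escalate to the stronger verifier
  } catch {
    return await verify(prompt); // any drafter error also falls back to the verifier
  }
}

Lowering the threshold accepts more drafter answers (more cost savings); raising it escalates more often, which matches the trade-off described in the node's Quality Threshold property.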
package/dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"LmChatCascadeFlow.node.js","sourceRoot":"","sources":["../../../nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.ts"],"names":[],"mappings":";;;AAOA,+CAAsE;AAEtE,6EAA4E;AAK5E;;;GAGG;AACH,MAAM,gBAAiB,SAAQ,2BAAa;IAK1C,YACE,YAA2B,EAC3B,aAA4B,EAC5B,mBAA2B,GAAG;QAE9B,KAAK,CAAC,EAAE,CAAC,CAAC;QACV,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;IAC3C,CAAC;IAED,QAAQ;QACN,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,KAAK,CAAC,SAAS,CACb,QAAuB,EACvB,OAAkC,EAClC,UAAqC;QAErC,IAAI,CAAC;YACH,gCAAgC;YAChC,MAAM,aAAa,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,SAAS,CAAC,QAAQ,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;YACvF,MAAM,cAAc,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;YAE5D,iDAAiD;YACjD,2DAA2D;YAC3D,MAAM,YAAY,GAAG,cAAc,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC;YACvD,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,MAAM,GAAG,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC,mBAAmB;YAElF,4DAA4D;YAC5D,IAAI,YAAY,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;gBAC1C,OAAO,aAAa,CAAC;YACvB,CAAC;YAED,0CAA0C;YAC1C,MAAM,cAAc,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;YACzF,OAAO,cAAc,CAAC;QACxB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,gCAAgC;YAChC,OAAO,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;QAC3E,CAAC;IACH,CAAC;CACF;AAED,MAAa,iBAAiB;IAA9B;QACE,gBAAW,GAAyB;YAClC,WAAW,EAAE,wBAAwB;YACrC,IAAI,EAAE,mBAAmB;YACzB,IAAI,EAAE,sBAAsB;YAC5B,KAAK,EAAE,CAAC,WAAW,CAAC;YACpB,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,6IAA6I;YAC1J,QAAQ,EAAE;gBACR,IAAI,EAAE,wBAAwB;aAC/B;YACD,KAAK,EAAE;gBACL,UAAU,EAAE,CAAC,IAAI,CAAC;gBAClB,aAAa,EAAE;oBACb,EAAE,EAAE,CAAC,iBAAiB,EAAE,aAAa,CAAC;iBACvC;gBACD,SAAS,EAAE;oBACT,oBAAoB,EAAE;wBACpB;4BACE,GAAG,EAAE,0CAA0C;yBAChD;qBACF;iBACF;aACF;YACD,0DAA0D;YAC1D,2FAA2F;YAC3F,MAAM,EAAE;gBACN;oBACE,WAAW,EAAE,eAAe;oBAC5B,IAAI,EAAE,kBAAyB;oBAC/B,cAAc,EAAE,CAAC;oBACjB,QAAQ,EAAE,IAAI;iBACf;gBACD;oBACE,WAAW,EAAE,gBAAgB;oBAC7B,IAAI,EAAE,kBAAyB;oBAC/B,cAAc,EAAE,CAAC;oBACjB,QAAQ,EAAE,IAAI;iBACf;aACF;YACD,qDAAqD;YACrD,+EAA+E;YAC/E,OAAO,EAAE,CAAC,kBAAyB,CAAC;YACpC,WAAW,EAAE,CAAC,OAAO,CAAC;YACtB,UAAU,EAAE;gBACV;oBACE,WAAW,EAAE,mBAAmB;oBAChC,IAAI,EAAE,kBAAkB;oBACxB,IAAI,EAAE,QAAQ;oBACd,OAAO,EAAE,GAAG;oBACZ,WAAW,EAAE;wBACX,QAAQ,EAAE,CAAC;wBACX,QAAQ,EAAE,CAAC;wBACX,eAAe,EAAE,CAAC;qBACnB;oBACD,WAAW,EAAE,6GAA6G;iBAC3H;aACF;SACF,CAAC;IA0CJ,CAAC;IAxCC,KAAK,CAAC,UAAU;QACd,sCAAsC;QACtC,MAAM,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,EAAE,CAAC,EAAE,GAAG,CAAW,CAAC;QAErF,4CAA4C;QAC5C,MAAM,YAAY,GAAG,CAAC,MAAM,IAAI,CAAC,sBAAsB,CACrD,kBAAyB,EACzB,CAAC,CACF,CAAkB,CAAC;QAEpB,MAAM,aAAa,GAAG,CAAC,MAAM,IAAI,CAAC,sBAAsB,CACtD,kBAAyB,EACzB,CAAC,CACF,CAAkB,CAAC;QAEpB,IAAI,CAAC,YAAY,EAAE,CAAC;YAClB,MAAM,IAAI,iCAAkB,CAC1B,IAAI,CAAC,OAAO,EAAE,EACd,0FAA0F,CAC3F,CAAC;QACJ,CAAC;QAED,IAAI,CAAC,aAAa,EAAE,CAAC;YACnB,MAAM,IAAI,iCAAkB,CAC1B,IAAI,CAAC,OAAO,EAAE,EACd,4FAA4F,CAC7F,CAAC;QACJ,CAAC;QAED,sCAAsC;QACtC,MAAM,YAAY,GAAG,IAAI,gBAAgB,CACvC,YAAY,EACZ,aAAa,EACb,gBAAgB,CACjB,CAAC;QAEF,OAAO;YACL,QAAQ,EAAE,YAAY;SACvB,CAAC;IACJ,CAAC;CACF;AApGD,8CAoGC"}
package/dist/nodes/LmChatCascadeFlow/cascadeflow.svg
ADDED
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 91.76 91.76">
+  <defs>
+    <style>
+      .cls-1 {
+        fill: #667eea;
+      }
+    </style>
+  </defs>
+  <path class="cls-1" d="M38.19,54.36c-4.15-4.15-4.15-10.87,0-15.02,4.15-4.15,10.87-4.15,15.02,0,4.15,4.15,4.15,10.87,0,15.02s-10.87,4.15-15.02,0Z"/>
+  <path class="cls-1" d="M63.4,91.76H28.7c-7.93,0-14.35-6.42-14.35-14.35h49.05v14.35Z"/>
+  <path class="cls-1" d="M14.35,77.41c-7.93,0-14.35-6.42-14.35-14.35V29.8h14.35v47.61Z"/>
+  <path class="cls-1" d="M28.36,0h34.7c7.93,0,14.35,6.42,14.35,14.35H28.36V0Z"/>
+  <path class="cls-1" d="M77.41,14.35c7.93,0,14.35,6.42,14.35,14.35v33.26h-14.35V14.35Z"/>
+</svg>
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@cascadeflow/n8n-nodes-cascadeflow",
-  "version": "0.4.8",
+  "version": "0.4.9",
   "description": "n8n node for cascadeflow - Smart AI model cascading with 40-85% cost savings",
   "keywords": [
     "n8n-community-node-package",
@@ -30,7 +30,8 @@
       "dist/credentials/CascadeFlowApi.credentials.js"
     ],
     "nodes": [
-      "dist/nodes/CascadeFlow/CascadeFlow.node.js"
+      "dist/nodes/CascadeFlow/CascadeFlow.node.js",
+      "dist/nodes/LmChatCascadeFlow/LmChatCascadeFlow.node.js"
    ]
  },
  "devDependencies": {
@@ -50,7 +51,8 @@
    "@cascadeflow/core": "^0.4.0",
    "openai": "^4.73.1",
    "@anthropic-ai/sdk": "^0.30.0",
-    "groq-sdk": "^0.5.0"
+    "groq-sdk": "^0.5.0",
+    "@langchain/core": "^0.3.0"
  },
  "scripts": {
    "build": "tsc && gulp build:icons",