n8n-nodes-rooyai-model 1.0.3 → 1.0.5
This diff shows the published contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
@@ -1,5 +1,5 @@
-import {
+import { INodeType, INodeTypeDescription, ISupplyDataFunctions, SupplyData } from 'n8n-workflow';
 export declare class RooyaiChatModel implements INodeType {
     description: INodeTypeDescription;
-
+    supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData>;
 }
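The declaration change above appears to be the functional core of this release: instead of an execute-style method, the node now implements `supplyData`, which is how n8n sub-nodes hand a LangChain chat model to AI Agent and Chain nodes through the `response` field. Below is a minimal sketch of that contract under stated assumptions; apart from the `n8n-workflow` types and the `@langchain/openai` import (both listed later in this diff), every name and value is illustrative and not taken from the package.

```typescript
// Sketch of the supplyData contract assumed by this release; not the
// package's source. The key and model name are placeholders.
import type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';
import { ChatOpenAI } from '@langchain/openai';

async function buildExampleModel(
    this: ISupplyDataFunctions,
    itemIndex: number,
): Promise<SupplyData> {
    // A downstream AI Agent / Chain node reads the model from `response`
    // instead of receiving ordinary workflow items.
    const model = new ChatOpenAI({ apiKey: 'sk-example', model: 'gpt-4o-mini' });
    return { response: model };
}
```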
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.RooyaiChatModel = void 0;
+const openai_1 = require("@langchain/openai");
 class RooyaiChatModel {
     constructor() {
         this.description = {
@@ -9,14 +10,41 @@ class RooyaiChatModel {
             icon: 'file:rooyai.svg',
             group: ['transform'],
             version: 1,
-            description: 'Rooyai AI Chat Model',
-            defaults: {
-
-
+            description: 'Rooyai AI Chat Model compatible with Agents',
+            defaults: {
+                name: 'Rooyai Chat Model',
+            },
+            codex: {
+                categories: ['AI'],
+                subcategories: {
+                    AI: ['Language Models', 'Chat Models'],
+                },
+            },
+            inputs: [],
+            outputs: ['ai_languageModel'],
             credentials: [
-                {
-
-
+                {
+                    name: 'rooyaiAccount',
+                    required: true,
+                },
+                {
+                    name: 'rooyaiOpenAI',
+                    required: true,
+                    displayOptions: {
+                        show: {
+                            provider: ['openai'],
+                        },
+                    },
+                },
+                {
+                    name: 'rooyaiOpenRouter',
+                    required: true,
+                    displayOptions: {
+                        show: {
+                            provider: ['openrouter'],
+                        },
+                    },
+                },
             ],
             properties: [
                 {
@@ -24,58 +52,126 @@ class RooyaiChatModel {
                     name: 'provider',
                     type: 'options',
                     options: [
-                        { name: 'OpenAI', value: 'openai' },
                         { name: 'OpenRouter', value: 'openrouter' },
+                        { name: 'OpenAI', value: 'openai' },
                     ],
                     default: 'openrouter',
+                    noDataExpression: true,
                 },
+                // OpenRouter Models (comprehensive static list)
                 {
                     displayName: 'Model',
                     name: 'model',
-                    type: '
-
-                    displayOptions: {
+                    type: 'options',
+                    description: 'Select the model to use',
+                    displayOptions: {
+                        show: {
+                            provider: ['openrouter'],
+                        },
+                    },
+                    default: 'openai/gpt-4o',
+                    options: [
+                        // OpenAI
+                        { name: 'GPT-4o', value: 'openai/gpt-4o' },
+                        { name: 'GPT-4o Mini', value: 'openai/gpt-4o-mini' },
+                        { name: 'GPT-4 Turbo', value: 'openai/gpt-4-turbo' },
+                        // Anthropic
+                        { name: 'Claude 3.5 Sonnet', value: 'anthropic/claude-3.5-sonnet' },
+                        { name: 'Claude 3 Opus', value: 'anthropic/claude-3-opus' },
+                        { name: 'Claude 3 Haiku', value: 'anthropic/claude-3-haiku' },
+                        // Google
+                        { name: 'Gemini Pro 1.5', value: 'google/gemini-pro-1.5' },
+                        { name: 'Gemini Flash 1.5', value: 'google/gemini-flash-1.5' },
+                        // Meta
+                        { name: 'Llama 3 70B', value: 'meta-llama/llama-3-70b-instruct' },
+                        { name: 'Llama 3 8B', value: 'meta-llama/llama-3-8b-instruct' },
+                        // DeepSeek
+                        { name: 'DeepSeek R1', value: 'deepseek/deepseek-r1' },
+                        { name: 'DeepSeek V3', value: 'deepseek/deepseek-chat' },
+                        // Mistral
+                        { name: 'Mistral Large', value: 'mistralai/mistral-large' },
+                    ],
                 },
+                // OpenAI Models
                 {
                     displayName: 'Model',
                     name: 'model',
-                    type: '
-
-
+                    type: 'options',
+                    displayOptions: {
+                        show: {
+                            provider: ['openai'],
+                        },
+                    },
+                    options: [
+                        { name: 'GPT-4o', value: 'gpt-4o' },
+                        { name: 'GPT-4o Mini', value: 'gpt-4o-mini' },
+                        { name: 'GPT-4 Turbo', value: 'gpt-4-turbo' },
+                        { name: 'GPT-3.5 Turbo', value: 'gpt-3.5-turbo' },
+                    ],
+                    default: 'gpt-4o',
                 },
+                // Options
                 {
-                    displayName: '
-                    name: '
-                    type: '
-
+                    displayName: 'Options',
+                    name: 'options',
+                    type: 'collection',
+                    placeholder: 'Add Option',
+                    default: {},
+                    options: [
+                        {
+                            displayName: 'Temperature',
+                            name: 'temperature',
+                            type: 'number',
+                            default: 0.7,
+                            typeOptions: {
+                                minValue: 0,
+                                maxValue: 2,
+                                numberStepSize: 0.1,
+                            },
+                        },
+                        {
+                            displayName: 'Max Tokens',
+                            name: 'maxTokens',
+                            type: 'number',
+                            default: 2000,
+                            description: 'The maximum number of tokens to generate',
+                        },
+                    ],
                 },
             ],
         };
     }
-    async
-
+    async supplyData(itemIndex) {
+        var _a;
         const credentials = await this.getCredentials('rooyaiAccount');
-        const provider = this.getNodeParameter('provider',
-
-
-
+        const provider = this.getNodeParameter('provider', itemIndex);
+        const modelName = this.getNodeParameter('model', itemIndex);
+        const options = this.getNodeParameter('options', itemIndex, {});
+        let apiKey = '';
+        if (provider === 'openrouter') {
+            const creds = await this.getCredentials('rooyaiOpenRouter');
+            apiKey = creds.apiKey;
         }
         else {
-
+            const creds = await this.getCredentials('rooyaiOpenAI');
+            apiKey = creds.apiKey;
         }
-        const
-
-
-
-
-
-
-
-            messages: [{ role: 'user', content: this.getNodeParameter('messages', 0) }],
+        const model = new openai_1.ChatOpenAI({
+            openAIApiKey: apiKey,
+            configuration: {
+                baseURL: 'http://35.226.94.120:3000/v1',
+                defaultHeaders: {
+                    'x-rooyai-token': credentials.token,
+                    'x-rooyai-provider': provider,
+                },
             },
-
+            modelName: modelName,
+            temperature: (_a = options.temperature) !== null && _a !== void 0 ? _a : 0.7,
+            maxTokens: options.maxTokens,
         });
-        return
+        return {
+            response: model,
+        };
     }
 }
 exports.RooyaiChatModel = RooyaiChatModel;
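The file above is compiled output, so here is a hedged sketch of roughly what the corresponding TypeScript source of `supplyData` could look like, reconstructed only from the compiled lines in this diff. The credential field shapes, the `options` type, and the `as string` casts are assumptions; `openAIApiKey` and `modelName` are the legacy option names that the compiled code itself passes to `ChatOpenAI`.

```typescript
import type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';
import { ChatOpenAI } from '@langchain/openai';

// Reconstructed sketch of RooyaiChatModel.supplyData, based on the compiled
// JS shown above; not the package's actual source.
async function supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
    const credentials = await this.getCredentials('rooyaiAccount');
    const provider = this.getNodeParameter('provider', itemIndex) as string;
    const modelName = this.getNodeParameter('model', itemIndex) as string;
    const options = this.getNodeParameter('options', itemIndex, {}) as {
        temperature?: number;
        maxTokens?: number;
    };

    // Pick the API-key credential that matches the selected provider.
    const creds =
        provider === 'openrouter'
            ? await this.getCredentials('rooyaiOpenRouter')
            : await this.getCredentials('rooyaiOpenAI');
    const apiKey = creds.apiKey as string;

    // All traffic goes through the Rooyai gateway, which routes to the
    // chosen provider based on the custom headers.
    const model = new ChatOpenAI({
        openAIApiKey: apiKey,
        configuration: {
            baseURL: 'http://35.226.94.120:3000/v1',
            defaultHeaders: {
                'x-rooyai-token': credentials.token as string,
                'x-rooyai-provider': provider,
            },
        },
        modelName,
        temperature: options.temperature ?? 0.7,
        maxTokens: options.maxTokens,
    });

    return { response: model };
}
```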
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "n8n-nodes-rooyai-model",
-  "version": "1.0.3",
+  "version": "1.0.5",
   "description": "Rooyai AI Model Integration for n8n",
   "keywords": [
     "n8n-community-node-package"
@@ -23,10 +23,15 @@
     "prepublishOnly": "npm run build"
   },
   "files": [
-    "dist"
+    "dist",
+    "nodes/rooyai.svg"
   ],
   "devDependencies": {
-    "n8n-workflow": "^1.
+    "n8n-workflow": "^1.120.4",
     "typescript": "^5.9.3"
+  },
+  "dependencies": {
+    "@langchain/core": "^1.1.12",
+    "@langchain/openai": "^1.2.1"
   }
 }
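The new `dependencies` block is what allows the compiled node to `require("@langchain/openai")` at runtime. As a quick, hedged sanity check of the gateway configuration the node builds, something like the following should exercise the same base URL and headers outside of n8n; the environment variable names, key, token, and prompt are placeholders, not part of the package.

```typescript
// smoke-test.ts — illustrative only; the base URL and header names are
// copied from the compiled node above, everything else is a placeholder.
import { ChatOpenAI } from '@langchain/openai';

const model = new ChatOpenAI({
    openAIApiKey: process.env.ROOYAI_API_KEY ?? '', // OpenRouter or OpenAI key
    configuration: {
        baseURL: 'http://35.226.94.120:3000/v1',
        defaultHeaders: {
            'x-rooyai-token': process.env.ROOYAI_TOKEN ?? '',
            'x-rooyai-provider': 'openrouter',
        },
    },
    modelName: 'openai/gpt-4o-mini',
    temperature: 0.7,
});

// Top-level await requires running this as an ES module (e.g. with tsx).
const reply = await model.invoke('Reply with the single word: ok');
console.log(reply.content);
```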