n8n-nodes-agnicwallet 1.0.11 → 1.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -6
- package/dist/credentials/AgnicWalletApi.credentials.js +70 -41
- package/dist/credentials/AgnicWalletOAuth2Api.credentials.js +120 -91
- package/dist/nodes/AgnicAI/AgnicAI.node.js +1160 -411
- package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.node.js +1016 -355
- package/dist/nodes/AgnicMCPTool/AgnicMCPTool.node.js +12330 -336
- package/dist/nodes/X402HttpRequest/X402HttpRequest.node.js +345 -310
- package/package.json +9 -8
|
@@ -1,365 +1,1026 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
-
Object.defineProperty
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
|
|
7
|
+
var __export = (target, all) => {
|
|
8
|
+
for (var name in all)
|
|
9
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
10
|
+
};
|
|
11
|
+
var __copyProps = (to, from, except, desc) => {
|
|
12
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
13
|
+
for (let key of __getOwnPropNames(from))
|
|
14
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
15
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
16
|
+
}
|
|
17
|
+
return to;
|
|
18
|
+
};
|
|
19
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
20
|
+
|
|
21
|
+
// nodes/AgnicAILanguageModel/AgnicAILanguageModel.node.ts
|
|
22
|
+
var AgnicAILanguageModel_node_exports = {};
|
|
23
|
+
__export(AgnicAILanguageModel_node_exports, {
|
|
24
|
+
AgnicAILanguageModel: () => AgnicAILanguageModel
|
|
25
|
+
});
|
|
26
|
+
module.exports = __toCommonJS(AgnicAILanguageModel_node_exports);
|
|
27
|
+
var import_n8n_workflow = require("n8n-workflow");
|
|
28
|
+
var import_openai = require("@langchain/openai");
|
|
29
|
+
var import_base = require("@langchain/core/callbacks/base");
|
|
30
|
+
var AgnicLlmTracing = class extends import_base.BaseCallbackHandler {
|
|
31
|
+
constructor(executionFunctions) {
|
|
32
|
+
super();
|
|
33
|
+
this.name = "AgnicLlmTracing";
|
|
34
|
+
// This flag makes LangChain wait for handlers before continuing
|
|
35
|
+
this.awaitHandlers = true;
|
|
36
|
+
this.connectionType = import_n8n_workflow.NodeConnectionTypes.AiLanguageModel;
|
|
37
|
+
this.runsMap = {};
|
|
38
|
+
this.executionFunctions = executionFunctions;
|
|
39
|
+
}
|
|
40
|
+
static {
|
|
41
|
+
__name(this, "AgnicLlmTracing");
|
|
42
|
+
}
|
|
43
|
+
async handleLLMStart(llm, prompts, runId) {
|
|
44
|
+
const options = llm.kwargs || llm;
|
|
45
|
+
const { index } = this.executionFunctions.addInputData(
|
|
46
|
+
this.connectionType,
|
|
47
|
+
[[{ json: { messages: prompts, options } }]]
|
|
48
|
+
);
|
|
49
|
+
this.runsMap[runId] = {
|
|
50
|
+
index,
|
|
51
|
+
options,
|
|
52
|
+
messages: prompts
|
|
53
|
+
};
|
|
54
|
+
this.logAiEvent("ai-llm-generated-output-started", {
|
|
55
|
+
messages: prompts,
|
|
56
|
+
options
|
|
57
|
+
});
|
|
58
|
+
}
|
|
59
|
+
async handleLLMEnd(output, runId) {
|
|
60
|
+
const runDetails = this.runsMap[runId] ?? { index: 0 };
|
|
61
|
+
const generations = output.generations.map(
|
|
62
|
+
(gen) => gen.map((g) => ({ text: g.text, generationInfo: g.generationInfo }))
|
|
63
|
+
);
|
|
64
|
+
const response = {
|
|
65
|
+
generations,
|
|
66
|
+
llmOutput: output.llmOutput
|
|
67
|
+
};
|
|
68
|
+
this.executionFunctions.addOutputData(
|
|
69
|
+
this.connectionType,
|
|
70
|
+
runDetails.index,
|
|
71
|
+
[[{ json: response }]]
|
|
72
|
+
);
|
|
73
|
+
this.logAiEvent("ai-llm-generated-output", {
|
|
74
|
+
messages: runDetails.messages,
|
|
75
|
+
options: runDetails.options,
|
|
76
|
+
response
|
|
77
|
+
});
|
|
78
|
+
}
|
|
79
|
+
async handleLLMError(error, runId) {
|
|
80
|
+
const runDetails = this.runsMap[runId] ?? { index: 0 };
|
|
81
|
+
this.executionFunctions.addOutputData(
|
|
82
|
+
this.connectionType,
|
|
83
|
+
runDetails.index,
|
|
84
|
+
new import_n8n_workflow.NodeOperationError(this.executionFunctions.getNode(), error, {
|
|
85
|
+
functionality: "configuration-node"
|
|
86
|
+
})
|
|
87
|
+
);
|
|
88
|
+
this.logAiEvent("ai-llm-errored", {
|
|
89
|
+
error: error.message || String(error),
|
|
90
|
+
runId
|
|
91
|
+
});
|
|
92
|
+
}
|
|
93
|
+
logAiEvent(event, data) {
|
|
94
|
+
try {
|
|
95
|
+
this.executionFunctions.logAiEvent?.(
|
|
96
|
+
event,
|
|
97
|
+
data ? (0, import_n8n_workflow.jsonStringify)(data) : void 0
|
|
98
|
+
);
|
|
99
|
+
} catch {
|
|
21
100
|
}
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
// This stops the spinning indicator and shows success
|
|
49
|
-
this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [[{ json: response }]]);
|
|
50
|
-
// Log AI event for the AI Agent's log panel
|
|
51
|
-
this.logAiEvent("ai-llm-generated-output", {
|
|
52
|
-
messages: runDetails.messages,
|
|
53
|
-
options: runDetails.options,
|
|
54
|
-
response,
|
|
55
|
-
});
|
|
56
|
-
}
|
|
57
|
-
async handleLLMError(error, runId) {
|
|
58
|
-
var _a;
|
|
59
|
-
const runDetails = (_a = this.runsMap[runId]) !== null && _a !== void 0 ? _a : { index: 0 };
|
|
60
|
-
// Add error output
|
|
61
|
-
this.executionFunctions.addOutputData(this.connectionType, runDetails.index, new n8n_workflow_1.NodeOperationError(this.executionFunctions.getNode(), error, {
|
|
62
|
-
functionality: "configuration-node",
|
|
63
|
-
}));
|
|
64
|
-
// Log AI error event
|
|
65
|
-
this.logAiEvent("ai-llm-errored", {
|
|
66
|
-
error: error.message || String(error),
|
|
67
|
-
runId,
|
|
68
|
-
});
|
|
69
|
-
}
|
|
70
|
-
logAiEvent(event, data) {
|
|
71
|
-
var _a, _b;
|
|
72
|
-
try {
|
|
73
|
-
(_b = (_a = this.executionFunctions).logAiEvent) === null || _b === void 0 ? void 0 : _b.call(_a, event, data ? (0, n8n_workflow_1.jsonStringify)(data) : undefined);
|
|
101
|
+
}
|
|
102
|
+
};
|
|
103
|
+
var AgnicAILanguageModel = class {
|
|
104
|
+
constructor() {
|
|
105
|
+
this.description = {
|
|
106
|
+
displayName: "AgnicAI Chat Model",
|
|
107
|
+
name: "lmChatAgnicAI",
|
|
108
|
+
icon: "file:AgnicAILanguageModel.png",
|
|
109
|
+
group: ["transform"],
|
|
110
|
+
version: [1, 1.1],
|
|
111
|
+
description: "Chat model using AgnicPay AI Gateway with X402 payment support",
|
|
112
|
+
defaults: {
|
|
113
|
+
name: "AgnicAI Chat Model"
|
|
114
|
+
},
|
|
115
|
+
codex: {
|
|
116
|
+
categories: ["AI"],
|
|
117
|
+
subcategories: {
|
|
118
|
+
AI: ["Language Models", "Root Nodes"],
|
|
119
|
+
"Language Models": ["Chat Models (Recommended)"]
|
|
120
|
+
},
|
|
121
|
+
resources: {
|
|
122
|
+
primaryDocumentation: [
|
|
123
|
+
{
|
|
124
|
+
url: "https://www.agnicpay.xyz/ai-gateway"
|
|
125
|
+
}
|
|
126
|
+
]
|
|
74
127
|
}
|
|
75
|
-
|
|
76
|
-
|
|
128
|
+
},
|
|
129
|
+
inputs: [],
|
|
130
|
+
outputs: [import_n8n_workflow.NodeConnectionTypes.AiLanguageModel],
|
|
131
|
+
outputNames: ["Model"],
|
|
132
|
+
credentials: [
|
|
133
|
+
{
|
|
134
|
+
name: "agnicWalletOAuth2Api",
|
|
135
|
+
required: false,
|
|
136
|
+
displayOptions: {
|
|
137
|
+
show: {
|
|
138
|
+
authentication: ["oAuth2"]
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
},
|
|
142
|
+
{
|
|
143
|
+
name: "agnicWalletApi",
|
|
144
|
+
required: false,
|
|
145
|
+
displayOptions: {
|
|
146
|
+
show: {
|
|
147
|
+
authentication: ["apiKey"]
|
|
148
|
+
}
|
|
149
|
+
}
|
|
77
150
|
}
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
151
|
+
],
|
|
152
|
+
properties: [
|
|
153
|
+
{
|
|
154
|
+
displayName: "Authentication",
|
|
155
|
+
name: "authentication",
|
|
156
|
+
type: "options",
|
|
157
|
+
options: [
|
|
158
|
+
{
|
|
159
|
+
name: "OAuth2",
|
|
160
|
+
value: "oAuth2",
|
|
161
|
+
description: "Recommended: Connect your account"
|
|
162
|
+
},
|
|
163
|
+
{
|
|
164
|
+
name: "API Key",
|
|
165
|
+
value: "apiKey",
|
|
166
|
+
description: "For CI/CD or programmatic access"
|
|
167
|
+
}
|
|
168
|
+
],
|
|
169
|
+
default: "apiKey",
|
|
170
|
+
description: "How to authenticate with AgnicWallet"
|
|
171
|
+
},
|
|
172
|
+
{
|
|
173
|
+
displayName: "Model",
|
|
174
|
+
name: "model",
|
|
175
|
+
type: "options",
|
|
176
|
+
typeOptions: {
|
|
177
|
+
allowCustomValues: true
|
|
178
|
+
},
|
|
179
|
+
options: [
|
|
180
|
+
// ===== RECOMMENDED MODELS =====
|
|
181
|
+
{
|
|
182
|
+
name: "\u2B50 OpenAI: GPT-4o Mini (Recommended)",
|
|
183
|
+
value: "openai/gpt-4o-mini"
|
|
184
|
+
},
|
|
185
|
+
{
|
|
186
|
+
name: "\u2B50 Anthropic: Claude 3.5 Sonnet (Recommended)",
|
|
187
|
+
value: "anthropic/claude-3.5-sonnet"
|
|
188
|
+
},
|
|
189
|
+
{
|
|
190
|
+
name: "\u2B50 Google: Gemini 2.0 Flash (Recommended)",
|
|
191
|
+
value: "google/gemini-2.0-flash-001"
|
|
192
|
+
},
|
|
193
|
+
{
|
|
194
|
+
name: "\u2B50 Meta: Llama 3.3 70B (Recommended)",
|
|
195
|
+
value: "meta-llama/llama-3.3-70b-instruct"
|
|
196
|
+
},
|
|
197
|
+
{
|
|
198
|
+
name: "\u2B50 DeepSeek: Chat V3 (Recommended - Affordable)",
|
|
199
|
+
value: "deepseek/deepseek-chat"
|
|
200
|
+
},
|
|
201
|
+
// ===== OpenAI Models =====
|
|
202
|
+
{
|
|
203
|
+
name: "OpenAI: GPT-4.1",
|
|
204
|
+
value: "openai/gpt-4.1"
|
|
205
|
+
},
|
|
206
|
+
{
|
|
207
|
+
name: "OpenAI: GPT-4.1 Mini",
|
|
208
|
+
value: "openai/gpt-4.1-mini"
|
|
209
|
+
},
|
|
210
|
+
{
|
|
211
|
+
name: "OpenAI: GPT-4.1 Nano",
|
|
212
|
+
value: "openai/gpt-4.1-nano"
|
|
213
|
+
},
|
|
214
|
+
{
|
|
215
|
+
name: "OpenAI: GPT-4o",
|
|
216
|
+
value: "openai/gpt-4o"
|
|
217
|
+
},
|
|
218
|
+
{
|
|
219
|
+
name: "OpenAI: GPT-4o Mini",
|
|
220
|
+
value: "openai/gpt-4o-mini"
|
|
221
|
+
},
|
|
222
|
+
{
|
|
223
|
+
name: "OpenAI: GPT-4o 2024-11-20",
|
|
224
|
+
value: "openai/gpt-4o-2024-11-20"
|
|
225
|
+
},
|
|
226
|
+
{
|
|
227
|
+
name: "OpenAI: GPT-4o 2024-08-06",
|
|
228
|
+
value: "openai/gpt-4o-2024-08-06"
|
|
229
|
+
},
|
|
230
|
+
{
|
|
231
|
+
name: "OpenAI: GPT-4o 2024-05-13",
|
|
232
|
+
value: "openai/gpt-4o-2024-05-13"
|
|
233
|
+
},
|
|
234
|
+
{
|
|
235
|
+
name: "OpenAI: GPT-4o Mini 2024-07-18",
|
|
236
|
+
value: "openai/gpt-4o-mini-2024-07-18"
|
|
237
|
+
},
|
|
238
|
+
{
|
|
239
|
+
name: "OpenAI: GPT-4 Turbo",
|
|
240
|
+
value: "openai/gpt-4-turbo"
|
|
241
|
+
},
|
|
242
|
+
{
|
|
243
|
+
name: "OpenAI: GPT-4 Turbo Preview",
|
|
244
|
+
value: "openai/gpt-4-turbo-preview"
|
|
245
|
+
},
|
|
246
|
+
{
|
|
247
|
+
name: "OpenAI: GPT-4 1106 Preview",
|
|
248
|
+
value: "openai/gpt-4-1106-preview"
|
|
249
|
+
},
|
|
250
|
+
{
|
|
251
|
+
name: "OpenAI: GPT-4",
|
|
252
|
+
value: "openai/gpt-4"
|
|
253
|
+
},
|
|
254
|
+
{
|
|
255
|
+
name: "OpenAI: GPT-4 32K",
|
|
256
|
+
value: "openai/gpt-4-32k"
|
|
257
|
+
},
|
|
258
|
+
{
|
|
259
|
+
name: "OpenAI: GPT-3.5 Turbo",
|
|
260
|
+
value: "openai/gpt-3.5-turbo"
|
|
261
|
+
},
|
|
262
|
+
{
|
|
263
|
+
name: "OpenAI: GPT-3.5 Turbo 16K",
|
|
264
|
+
value: "openai/gpt-3.5-turbo-16k"
|
|
265
|
+
},
|
|
266
|
+
{
|
|
267
|
+
name: "OpenAI: GPT-3.5 Turbo 0125",
|
|
268
|
+
value: "openai/gpt-3.5-turbo-0125"
|
|
269
|
+
},
|
|
270
|
+
{
|
|
271
|
+
name: "OpenAI: GPT-3.5 Turbo 1106",
|
|
272
|
+
value: "openai/gpt-3.5-turbo-1106"
|
|
273
|
+
},
|
|
274
|
+
{
|
|
275
|
+
name: "OpenAI: o1",
|
|
276
|
+
value: "openai/o1"
|
|
277
|
+
},
|
|
278
|
+
{
|
|
279
|
+
name: "OpenAI: o1 Mini",
|
|
280
|
+
value: "openai/o1-mini"
|
|
281
|
+
},
|
|
282
|
+
{
|
|
283
|
+
name: "OpenAI: o1 Preview",
|
|
284
|
+
value: "openai/o1-preview"
|
|
285
|
+
},
|
|
286
|
+
{
|
|
287
|
+
name: "OpenAI: o3 Mini",
|
|
288
|
+
value: "openai/o3-mini"
|
|
289
|
+
},
|
|
290
|
+
{
|
|
291
|
+
name: "OpenAI: o3 Mini High",
|
|
292
|
+
value: "openai/o3-mini-high"
|
|
293
|
+
},
|
|
294
|
+
{
|
|
295
|
+
name: "OpenAI: o4 Mini",
|
|
296
|
+
value: "openai/o4-mini"
|
|
297
|
+
},
|
|
298
|
+
{
|
|
299
|
+
name: "OpenAI: o4 Mini High",
|
|
300
|
+
value: "openai/o4-mini-high"
|
|
301
|
+
},
|
|
302
|
+
// ===== Anthropic Models =====
|
|
303
|
+
{
|
|
304
|
+
name: "Anthropic: Claude Sonnet 4",
|
|
305
|
+
value: "anthropic/claude-sonnet-4"
|
|
306
|
+
},
|
|
307
|
+
{
|
|
308
|
+
name: "Anthropic: Claude Opus 4",
|
|
309
|
+
value: "anthropic/claude-opus-4"
|
|
310
|
+
},
|
|
311
|
+
{
|
|
312
|
+
name: "Anthropic: Claude 3.7 Sonnet",
|
|
313
|
+
value: "anthropic/claude-3.7-sonnet"
|
|
314
|
+
},
|
|
315
|
+
{
|
|
316
|
+
name: "Anthropic: Claude 3.5 Sonnet",
|
|
317
|
+
value: "anthropic/claude-3.5-sonnet"
|
|
318
|
+
},
|
|
319
|
+
{
|
|
320
|
+
name: "Anthropic: Claude 3.5 Sonnet 2024-10-22",
|
|
321
|
+
value: "anthropic/claude-3.5-sonnet-20241022"
|
|
322
|
+
},
|
|
323
|
+
{
|
|
324
|
+
name: "Anthropic: Claude 3.5 Haiku",
|
|
325
|
+
value: "anthropic/claude-3.5-haiku"
|
|
326
|
+
},
|
|
327
|
+
{
|
|
328
|
+
name: "Anthropic: Claude 3.5 Haiku 2024-10-22",
|
|
329
|
+
value: "anthropic/claude-3.5-haiku-20241022"
|
|
330
|
+
},
|
|
331
|
+
{
|
|
332
|
+
name: "Anthropic: Claude 3 Opus",
|
|
333
|
+
value: "anthropic/claude-3-opus"
|
|
334
|
+
},
|
|
335
|
+
{
|
|
336
|
+
name: "Anthropic: Claude 3 Opus 2024-02-29",
|
|
337
|
+
value: "anthropic/claude-3-opus-20240229"
|
|
338
|
+
},
|
|
339
|
+
{
|
|
340
|
+
name: "Anthropic: Claude 3 Sonnet",
|
|
341
|
+
value: "anthropic/claude-3-sonnet"
|
|
342
|
+
},
|
|
343
|
+
{
|
|
344
|
+
name: "Anthropic: Claude 3 Haiku",
|
|
345
|
+
value: "anthropic/claude-3-haiku"
|
|
346
|
+
},
|
|
347
|
+
{
|
|
348
|
+
name: "Anthropic: Claude 3 Haiku 2024-03-07",
|
|
349
|
+
value: "anthropic/claude-3-haiku-20240307"
|
|
350
|
+
},
|
|
351
|
+
// ===== Google Models =====
|
|
352
|
+
{
|
|
353
|
+
name: "Google: Gemini 3 Flash Preview",
|
|
354
|
+
value: "google/gemini-3-flash-preview"
|
|
355
|
+
},
|
|
356
|
+
{
|
|
357
|
+
name: "Google: Gemini 2.5 Pro Preview",
|
|
358
|
+
value: "google/gemini-2.5-pro-preview"
|
|
359
|
+
},
|
|
360
|
+
{
|
|
361
|
+
name: "Google: Gemini 2.5 Flash Preview",
|
|
362
|
+
value: "google/gemini-2.5-flash-preview"
|
|
363
|
+
},
|
|
364
|
+
{
|
|
365
|
+
name: "Google: Gemini 2.0 Flash",
|
|
366
|
+
value: "google/gemini-2.0-flash-001"
|
|
367
|
+
},
|
|
368
|
+
{
|
|
369
|
+
name: "Google: Gemini 2.0 Flash Lite",
|
|
370
|
+
value: "google/gemini-2.0-flash-lite-001"
|
|
371
|
+
},
|
|
372
|
+
{
|
|
373
|
+
name: "Google: Gemini 2.0 Flash Exp",
|
|
374
|
+
value: "google/gemini-2.0-flash-exp"
|
|
375
|
+
},
|
|
376
|
+
{
|
|
377
|
+
name: "Google: Gemini 2.0 Flash Thinking Exp",
|
|
378
|
+
value: "google/gemini-2.0-flash-thinking-exp"
|
|
379
|
+
},
|
|
380
|
+
{
|
|
381
|
+
name: "Google: Gemini Pro 1.5",
|
|
382
|
+
value: "google/gemini-pro-1.5"
|
|
383
|
+
},
|
|
384
|
+
{
|
|
385
|
+
name: "Google: Gemini Flash 1.5",
|
|
386
|
+
value: "google/gemini-flash-1.5"
|
|
387
|
+
},
|
|
388
|
+
{
|
|
389
|
+
name: "Google: Gemini Flash 1.5 8B",
|
|
390
|
+
value: "google/gemini-flash-1.5-8b"
|
|
391
|
+
},
|
|
392
|
+
{
|
|
393
|
+
name: "Google: Gemini Pro",
|
|
394
|
+
value: "google/gemini-pro"
|
|
395
|
+
},
|
|
396
|
+
{
|
|
397
|
+
name: "Google: Gemma 3 27B",
|
|
398
|
+
value: "google/gemma-3-27b-it"
|
|
399
|
+
},
|
|
400
|
+
{
|
|
401
|
+
name: "Google: Gemma 3 12B",
|
|
402
|
+
value: "google/gemma-3-12b-it"
|
|
403
|
+
},
|
|
404
|
+
{
|
|
405
|
+
name: "Google: Gemma 3 4B",
|
|
406
|
+
value: "google/gemma-3-4b-it"
|
|
407
|
+
},
|
|
408
|
+
{
|
|
409
|
+
name: "Google: Gemma 3 1B",
|
|
410
|
+
value: "google/gemma-3-1b-it"
|
|
411
|
+
},
|
|
412
|
+
{
|
|
413
|
+
name: "Google: Gemma 2 27B",
|
|
414
|
+
value: "google/gemma-2-27b-it"
|
|
415
|
+
},
|
|
416
|
+
{
|
|
417
|
+
name: "Google: Gemma 2 9B",
|
|
418
|
+
value: "google/gemma-2-9b-it"
|
|
419
|
+
},
|
|
420
|
+
// ===== Meta Llama Models =====
|
|
421
|
+
{
|
|
422
|
+
name: "Meta: Llama 4 Maverick",
|
|
423
|
+
value: "meta-llama/llama-4-maverick"
|
|
424
|
+
},
|
|
425
|
+
{
|
|
426
|
+
name: "Meta: Llama 4 Scout",
|
|
427
|
+
value: "meta-llama/llama-4-scout"
|
|
428
|
+
},
|
|
429
|
+
{
|
|
430
|
+
name: "Meta: Llama 3.3 70B Instruct",
|
|
431
|
+
value: "meta-llama/llama-3.3-70b-instruct"
|
|
432
|
+
},
|
|
433
|
+
{
|
|
434
|
+
name: "Meta: Llama 3.2 90B Vision Instruct",
|
|
435
|
+
value: "meta-llama/llama-3.2-90b-vision-instruct"
|
|
436
|
+
},
|
|
437
|
+
{
|
|
438
|
+
name: "Meta: Llama 3.2 11B Vision Instruct",
|
|
439
|
+
value: "meta-llama/llama-3.2-11b-vision-instruct"
|
|
440
|
+
},
|
|
441
|
+
{
|
|
442
|
+
name: "Meta: Llama 3.2 3B Instruct",
|
|
443
|
+
value: "meta-llama/llama-3.2-3b-instruct"
|
|
444
|
+
},
|
|
445
|
+
{
|
|
446
|
+
name: "Meta: Llama 3.2 1B Instruct",
|
|
447
|
+
value: "meta-llama/llama-3.2-1b-instruct"
|
|
448
|
+
},
|
|
449
|
+
{
|
|
450
|
+
name: "Meta: Llama 3.1 405B Instruct",
|
|
451
|
+
value: "meta-llama/llama-3.1-405b-instruct"
|
|
452
|
+
},
|
|
453
|
+
{
|
|
454
|
+
name: "Meta: Llama 3.1 70B Instruct",
|
|
455
|
+
value: "meta-llama/llama-3.1-70b-instruct"
|
|
456
|
+
},
|
|
457
|
+
{
|
|
458
|
+
name: "Meta: Llama 3.1 8B Instruct",
|
|
459
|
+
value: "meta-llama/llama-3.1-8b-instruct"
|
|
460
|
+
},
|
|
461
|
+
{
|
|
462
|
+
name: "Meta: Llama 3 70B Instruct",
|
|
463
|
+
value: "meta-llama/llama-3-70b-instruct"
|
|
464
|
+
},
|
|
465
|
+
{
|
|
466
|
+
name: "Meta: Llama 3 8B Instruct",
|
|
467
|
+
value: "meta-llama/llama-3-8b-instruct"
|
|
468
|
+
},
|
|
469
|
+
// ===== Mistral Models =====
|
|
470
|
+
{
|
|
471
|
+
name: "Mistral: Large 2411",
|
|
472
|
+
value: "mistralai/mistral-large-2411"
|
|
473
|
+
},
|
|
474
|
+
{
|
|
475
|
+
name: "Mistral: Large 2407",
|
|
476
|
+
value: "mistralai/mistral-large-2407"
|
|
477
|
+
},
|
|
478
|
+
{
|
|
479
|
+
name: "Mistral: Large",
|
|
480
|
+
value: "mistralai/mistral-large"
|
|
481
|
+
},
|
|
482
|
+
{
|
|
483
|
+
name: "Mistral: Medium",
|
|
484
|
+
value: "mistralai/mistral-medium"
|
|
485
|
+
},
|
|
486
|
+
{
|
|
487
|
+
name: "Mistral: Small",
|
|
488
|
+
value: "mistralai/mistral-small"
|
|
489
|
+
},
|
|
490
|
+
{
|
|
491
|
+
name: "Mistral: Small 2503",
|
|
492
|
+
value: "mistralai/mistral-small-2503"
|
|
493
|
+
},
|
|
494
|
+
{
|
|
495
|
+
name: "Mistral: Small 2501",
|
|
496
|
+
value: "mistralai/mistral-small-2501"
|
|
497
|
+
},
|
|
498
|
+
{
|
|
499
|
+
name: "Mistral: Small 2409",
|
|
500
|
+
value: "mistralai/mistral-small-2409"
|
|
501
|
+
},
|
|
502
|
+
{
|
|
503
|
+
name: "Mistral: Small Creative",
|
|
504
|
+
value: "mistralai/mistral-small-creative"
|
|
505
|
+
},
|
|
506
|
+
{
|
|
507
|
+
name: "Mistral: Nemo",
|
|
508
|
+
value: "mistralai/mistral-nemo"
|
|
509
|
+
},
|
|
510
|
+
{
|
|
511
|
+
name: "Mistral: Mixtral 8x22B Instruct",
|
|
512
|
+
value: "mistralai/mixtral-8x22b-instruct"
|
|
513
|
+
},
|
|
514
|
+
{
|
|
515
|
+
name: "Mistral: Mixtral 8x7B Instruct",
|
|
516
|
+
value: "mistralai/mixtral-8x7b-instruct"
|
|
517
|
+
},
|
|
518
|
+
{
|
|
519
|
+
name: "Mistral: Pixtral Large",
|
|
520
|
+
value: "mistralai/pixtral-large-latest"
|
|
521
|
+
},
|
|
522
|
+
{
|
|
523
|
+
name: "Mistral: Pixtral 12B",
|
|
524
|
+
value: "mistralai/pixtral-12b"
|
|
525
|
+
},
|
|
526
|
+
{
|
|
527
|
+
name: "Mistral: Codestral",
|
|
528
|
+
value: "mistralai/codestral-latest"
|
|
529
|
+
},
|
|
530
|
+
{
|
|
531
|
+
name: "Mistral: Ministral 3B",
|
|
532
|
+
value: "mistralai/ministral-3b"
|
|
533
|
+
},
|
|
534
|
+
{
|
|
535
|
+
name: "Mistral: Ministral 8B",
|
|
536
|
+
value: "mistralai/ministral-8b"
|
|
537
|
+
},
|
|
538
|
+
// ===== DeepSeek Models =====
|
|
539
|
+
{
|
|
540
|
+
name: "DeepSeek: R1",
|
|
541
|
+
value: "deepseek/deepseek-r1"
|
|
542
|
+
},
|
|
543
|
+
{
|
|
544
|
+
name: "DeepSeek: R1 0528",
|
|
545
|
+
value: "deepseek/deepseek-r1-0528"
|
|
546
|
+
},
|
|
547
|
+
{
|
|
548
|
+
name: "DeepSeek: R1 Distill Llama 70B",
|
|
549
|
+
value: "deepseek/deepseek-r1-distill-llama-70b"
|
|
550
|
+
},
|
|
551
|
+
{
|
|
552
|
+
name: "DeepSeek: R1 Distill Qwen 32B",
|
|
553
|
+
value: "deepseek/deepseek-r1-distill-qwen-32b"
|
|
554
|
+
},
|
|
555
|
+
{
|
|
556
|
+
name: "DeepSeek: R1 Distill Qwen 14B",
|
|
557
|
+
value: "deepseek/deepseek-r1-distill-qwen-14b"
|
|
558
|
+
},
|
|
559
|
+
{
|
|
560
|
+
name: "DeepSeek: Chat V3",
|
|
561
|
+
value: "deepseek/deepseek-chat"
|
|
562
|
+
},
|
|
563
|
+
{
|
|
564
|
+
name: "DeepSeek: Chat V3 0324",
|
|
565
|
+
value: "deepseek/deepseek-chat-v3-0324"
|
|
566
|
+
},
|
|
567
|
+
{
|
|
568
|
+
name: "DeepSeek: Coder",
|
|
569
|
+
value: "deepseek/deepseek-coder"
|
|
570
|
+
},
|
|
571
|
+
{
|
|
572
|
+
name: "DeepSeek: Prover V2",
|
|
573
|
+
value: "deepseek/deepseek-prover-v2"
|
|
574
|
+
},
|
|
575
|
+
// ===== Qwen Models =====
|
|
576
|
+
{
|
|
577
|
+
name: "Qwen: Qwen3 235B A22B",
|
|
578
|
+
value: "qwen/qwen3-235b-a22b"
|
|
579
|
+
},
|
|
580
|
+
{
|
|
581
|
+
name: "Qwen: Qwen3 32B",
|
|
582
|
+
value: "qwen/qwen3-32b"
|
|
583
|
+
},
|
|
584
|
+
{
|
|
585
|
+
name: "Qwen: Qwen3 30B A3B",
|
|
586
|
+
value: "qwen/qwen3-30b-a3b"
|
|
587
|
+
},
|
|
588
|
+
{
|
|
589
|
+
name: "Qwen: Qwen3 14B",
|
|
590
|
+
value: "qwen/qwen3-14b"
|
|
591
|
+
},
|
|
592
|
+
{
|
|
593
|
+
name: "Qwen: Qwen3 8B",
|
|
594
|
+
value: "qwen/qwen3-8b"
|
|
595
|
+
},
|
|
596
|
+
{
|
|
597
|
+
name: "Qwen: Qwen3 4B",
|
|
598
|
+
value: "qwen/qwen3-4b"
|
|
599
|
+
},
|
|
600
|
+
{
|
|
601
|
+
name: "Qwen: Qwen3 1.7B",
|
|
602
|
+
value: "qwen/qwen3-1.7b"
|
|
603
|
+
},
|
|
604
|
+
{
|
|
605
|
+
name: "Qwen: Qwen 2.5 72B Instruct",
|
|
606
|
+
value: "qwen/qwen-2.5-72b-instruct"
|
|
607
|
+
},
|
|
608
|
+
{
|
|
609
|
+
name: "Qwen: Qwen 2.5 32B Instruct",
|
|
610
|
+
value: "qwen/qwen-2.5-32b-instruct"
|
|
611
|
+
},
|
|
612
|
+
{
|
|
613
|
+
name: "Qwen: Qwen 2.5 14B Instruct",
|
|
614
|
+
value: "qwen/qwen-2.5-14b-instruct"
|
|
615
|
+
},
|
|
616
|
+
{
|
|
617
|
+
name: "Qwen: Qwen 2.5 7B Instruct",
|
|
618
|
+
value: "qwen/qwen-2.5-7b-instruct"
|
|
619
|
+
},
|
|
620
|
+
{
|
|
621
|
+
name: "Qwen: Qwen 2.5 Coder 32B Instruct",
|
|
622
|
+
value: "qwen/qwen-2.5-coder-32b-instruct"
|
|
623
|
+
},
|
|
624
|
+
{
|
|
625
|
+
name: "Qwen: Qwen 2.5 Coder 7B Instruct",
|
|
626
|
+
value: "qwen/qwen-2.5-coder-7b-instruct"
|
|
627
|
+
},
|
|
628
|
+
{
|
|
629
|
+
name: "Qwen: QwQ 32B Preview",
|
|
630
|
+
value: "qwen/qwq-32b-preview"
|
|
631
|
+
},
|
|
632
|
+
{
|
|
633
|
+
name: "Qwen: QwQ 32B",
|
|
634
|
+
value: "qwen/qwq-32b"
|
|
635
|
+
},
|
|
636
|
+
{
|
|
637
|
+
name: "Qwen: Qwen 2 VL 72B Instruct",
|
|
638
|
+
value: "qwen/qwen-2-vl-72b-instruct"
|
|
639
|
+
},
|
|
640
|
+
{
|
|
641
|
+
name: "Qwen: Qwen 2 VL 7B Instruct",
|
|
642
|
+
value: "qwen/qwen-2-vl-7b-instruct"
|
|
643
|
+
},
|
|
644
|
+
// ===== Cohere Models =====
|
|
645
|
+
{
|
|
646
|
+
name: "Cohere: Command R+",
|
|
647
|
+
value: "cohere/command-r-plus"
|
|
648
|
+
},
|
|
649
|
+
{
|
|
650
|
+
name: "Cohere: Command R+ 08-2024",
|
|
651
|
+
value: "cohere/command-r-plus-08-2024"
|
|
652
|
+
},
|
|
653
|
+
{
|
|
654
|
+
name: "Cohere: Command R+ 04-2024",
|
|
655
|
+
value: "cohere/command-r-plus-04-2024"
|
|
656
|
+
},
|
|
657
|
+
{
|
|
658
|
+
name: "Cohere: Command R",
|
|
659
|
+
value: "cohere/command-r"
|
|
660
|
+
},
|
|
661
|
+
{
|
|
662
|
+
name: "Cohere: Command R 08-2024",
|
|
663
|
+
value: "cohere/command-r-08-2024"
|
|
664
|
+
},
|
|
665
|
+
{
|
|
666
|
+
name: "Cohere: Command R 03-2024",
|
|
667
|
+
value: "cohere/command-r-03-2024"
|
|
668
|
+
},
|
|
669
|
+
{
|
|
670
|
+
name: "Cohere: Command A",
|
|
671
|
+
value: "cohere/command-a"
|
|
672
|
+
},
|
|
673
|
+
// ===== xAI (Grok) Models =====
|
|
674
|
+
{
|
|
675
|
+
name: "xAI: Grok 3",
|
|
676
|
+
value: "x-ai/grok-3"
|
|
677
|
+
},
|
|
678
|
+
{
|
|
679
|
+
name: "xAI: Grok 3 Fast",
|
|
680
|
+
value: "x-ai/grok-3-fast"
|
|
681
|
+
},
|
|
682
|
+
{
|
|
683
|
+
name: "xAI: Grok 3 Mini",
|
|
684
|
+
value: "x-ai/grok-3-mini"
|
|
685
|
+
},
|
|
686
|
+
{
|
|
687
|
+
name: "xAI: Grok 3 Mini Fast",
|
|
688
|
+
value: "x-ai/grok-3-mini-fast"
|
|
689
|
+
},
|
|
690
|
+
{
|
|
691
|
+
name: "xAI: Grok 2",
|
|
692
|
+
value: "x-ai/grok-2"
|
|
693
|
+
},
|
|
694
|
+
{
|
|
695
|
+
name: "xAI: Grok 2 1212",
|
|
696
|
+
value: "x-ai/grok-2-1212"
|
|
697
|
+
},
|
|
698
|
+
{
|
|
699
|
+
name: "xAI: Grok 2 Vision 1212",
|
|
700
|
+
value: "x-ai/grok-2-vision-1212"
|
|
701
|
+
},
|
|
702
|
+
{
|
|
703
|
+
name: "xAI: Grok Beta",
|
|
704
|
+
value: "x-ai/grok-beta"
|
|
705
|
+
},
|
|
706
|
+
// ===== NVIDIA Models =====
|
|
707
|
+
{
|
|
708
|
+
name: "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
|
|
709
|
+
value: "nvidia/llama-3.1-nemotron-70b-instruct"
|
|
710
|
+
},
|
|
711
|
+
{
|
|
712
|
+
name: "NVIDIA: Llama 3.3 Nemotron Super 49B V1",
|
|
713
|
+
value: "nvidia/llama-3.3-nemotron-super-49b-v1"
|
|
714
|
+
},
|
|
715
|
+
// ===== Microsoft Models =====
|
|
716
|
+
{
|
|
717
|
+
name: "Microsoft: Phi-4",
|
|
718
|
+
value: "microsoft/phi-4"
|
|
719
|
+
},
|
|
720
|
+
{
|
|
721
|
+
name: "Microsoft: Phi-4 Multimodal Instruct",
|
|
722
|
+
value: "microsoft/phi-4-multimodal-instruct"
|
|
723
|
+
},
|
|
724
|
+
{
|
|
725
|
+
name: "Microsoft: MAI DS R1",
|
|
726
|
+
value: "microsoft/mai-ds-r1"
|
|
727
|
+
},
|
|
728
|
+
// ===== Amazon Models =====
|
|
729
|
+
{
|
|
730
|
+
name: "Amazon: Nova Pro 1.0",
|
|
731
|
+
value: "amazon/nova-pro-v1"
|
|
732
|
+
},
|
|
733
|
+
{
|
|
734
|
+
name: "Amazon: Nova Lite 1.0",
|
|
735
|
+
value: "amazon/nova-lite-v1"
|
|
736
|
+
},
|
|
737
|
+
{
|
|
738
|
+
name: "Amazon: Nova Micro 1.0",
|
|
739
|
+
value: "amazon/nova-micro-v1"
|
|
740
|
+
},
|
|
741
|
+
// ===== Perplexity Models =====
|
|
742
|
+
{
|
|
743
|
+
name: "Perplexity: Sonar Deep Research",
|
|
744
|
+
value: "perplexity/sonar-deep-research"
|
|
745
|
+
},
|
|
746
|
+
{
|
|
747
|
+
name: "Perplexity: Sonar Pro",
|
|
748
|
+
value: "perplexity/sonar-pro"
|
|
749
|
+
},
|
|
750
|
+
{
|
|
751
|
+
name: "Perplexity: Sonar",
|
|
752
|
+
value: "perplexity/sonar"
|
|
753
|
+
},
|
|
754
|
+
{
|
|
755
|
+
name: "Perplexity: Sonar Reasoning Pro",
|
|
756
|
+
value: "perplexity/sonar-reasoning-pro"
|
|
757
|
+
},
|
|
758
|
+
{
|
|
759
|
+
name: "Perplexity: Sonar Reasoning",
|
|
760
|
+
value: "perplexity/sonar-reasoning"
|
|
761
|
+
},
|
|
762
|
+
// ===== Nous Research Models =====
|
|
763
|
+
{
|
|
764
|
+
name: "Nous: Hermes 3 405B Instruct",
|
|
765
|
+
value: "nousresearch/hermes-3-llama-3.1-405b"
|
|
766
|
+
},
|
|
767
|
+
{
|
|
768
|
+
name: "Nous: Hermes 3 70B Instruct",
|
|
769
|
+
value: "nousresearch/hermes-3-llama-3.1-70b"
|
|
770
|
+
},
|
|
771
|
+
// ===== 01.AI Models =====
|
|
772
|
+
{
|
|
773
|
+
name: "01.AI: Yi Large",
|
|
774
|
+
value: "01-ai/yi-large"
|
|
775
|
+
},
|
|
776
|
+
{
|
|
777
|
+
name: "01.AI: Yi Large FC",
|
|
778
|
+
value: "01-ai/yi-large-fc"
|
|
779
|
+
},
|
|
780
|
+
{
|
|
781
|
+
name: "01.AI: Yi Large Turbo",
|
|
782
|
+
value: "01-ai/yi-large-turbo"
|
|
783
|
+
},
|
|
784
|
+
// ===== Inflection Models =====
|
|
785
|
+
{
|
|
786
|
+
name: "Inflection: Inflection 3 Pi",
|
|
787
|
+
value: "inflection/inflection-3-pi"
|
|
788
|
+
},
|
|
789
|
+
{
|
|
790
|
+
name: "Inflection: Inflection 3 Productivity",
|
|
791
|
+
value: "inflection/inflection-3-productivity"
|
|
792
|
+
},
|
|
793
|
+
// ===== AI21 Models =====
|
|
794
|
+
{
|
|
795
|
+
name: "AI21: Jamba 1.5 Large",
|
|
796
|
+
value: "ai21/jamba-1.5-large"
|
|
797
|
+
},
|
|
798
|
+
{
|
|
799
|
+
name: "AI21: Jamba 1.5 Mini",
|
|
800
|
+
value: "ai21/jamba-1.5-mini"
|
|
801
|
+
},
|
|
802
|
+
// ===== Databricks Models =====
|
|
803
|
+
{
|
|
804
|
+
name: "Databricks: DBRX Instruct",
|
|
805
|
+
value: "databricks/dbrx-instruct"
|
|
806
|
+
},
|
|
807
|
+
// ===== Fireworks Models =====
|
|
808
|
+
{
|
|
809
|
+
name: "Fireworks: Firellama 405B Instruct",
|
|
810
|
+
value: "fireworks/firellama-405b-instruct"
|
|
811
|
+
},
|
|
812
|
+
// ===== Groq Models =====
|
|
813
|
+
{
|
|
814
|
+
name: "Groq: Llama 3.3 70B Versatile",
|
|
815
|
+
value: "groq/llama-3.3-70b-versatile"
|
|
816
|
+
},
|
|
817
|
+
{
|
|
818
|
+
name: "Groq: Llama 3.1 8B Instant",
|
|
819
|
+
value: "groq/llama-3.1-8b-instant"
|
|
820
|
+
},
|
|
821
|
+
// ===== Cognitive Computations Models =====
|
|
822
|
+
{
|
|
823
|
+
name: "Cognitive Computations: Dolphin 3.0 R1 Mistral 24B",
|
|
824
|
+
value: "cognitivecomputations/dolphin-3.0-r1-mistral-24b"
|
|
825
|
+
},
|
|
826
|
+
{
|
|
827
|
+
name: "Cognitive Computations: Dolphin 3.0 Mistral 24B",
|
|
828
|
+
value: "cognitivecomputations/dolphin-3.0-mistral-24b"
|
|
829
|
+
},
|
|
830
|
+
// ===== FREE MODELS (with tool support) =====
|
|
831
|
+
{
|
|
832
|
+
name: "[FREE] NVIDIA: Nemotron 3 Nano 30B",
|
|
833
|
+
value: "nvidia/nemotron-3-nano-30b-a3b:free"
|
|
834
|
+
},
|
|
835
|
+
{
|
|
836
|
+
name: "[FREE] Xiaomi: MiMo V2 Flash",
|
|
837
|
+
value: "xiaomi/mimo-v2-flash:free"
|
|
838
|
+
},
|
|
839
|
+
{
|
|
840
|
+
name: "[FREE] Meta: Llama 3.1 8B Instruct",
|
|
841
|
+
value: "meta-llama/llama-3.1-8b-instruct:free"
|
|
842
|
+
},
|
|
843
|
+
{
|
|
844
|
+
name: "[FREE] Meta: Llama 3.2 3B Instruct",
|
|
845
|
+
value: "meta-llama/llama-3.2-3b-instruct:free"
|
|
846
|
+
},
|
|
847
|
+
{
|
|
848
|
+
name: "[FREE] Qwen: Qwen3 8B",
|
|
849
|
+
value: "qwen/qwen3-8b:free"
|
|
850
|
+
},
|
|
851
|
+
{
|
|
852
|
+
name: "[FREE] Qwen: Qwen3 4B",
|
|
853
|
+
value: "qwen/qwen3-4b:free"
|
|
854
|
+
},
|
|
855
|
+
{
|
|
856
|
+
name: "[FREE] Qwen: Qwen 2.5 7B Instruct",
|
|
857
|
+
value: "qwen/qwen-2.5-7b-instruct:free"
|
|
858
|
+
},
|
|
859
|
+
{
|
|
860
|
+
name: "[FREE] Qwen: Qwen 2.5 Coder 7B Instruct",
|
|
861
|
+
value: "qwen/qwen-2.5-coder-7b-instruct:free"
|
|
862
|
+
},
|
|
863
|
+
{
|
|
864
|
+
name: "[FREE] Google: Gemma 2 9B",
|
|
865
|
+
value: "google/gemma-2-9b-it:free"
|
|
866
|
+
},
|
|
867
|
+
{
|
|
868
|
+
name: "[FREE] Mistral: Mistral Small 3.1 24B",
|
|
869
|
+
value: "mistralai/mistral-small-3.1-24b-instruct:free"
|
|
321
870
|
}
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
871
|
+
],
|
|
872
|
+
default: "openai/gpt-4o-mini",
|
|
873
|
+
description: "Select a model or type a custom OpenRouter model ID. See https://openrouter.ai/models for all available models."
|
|
874
|
+
},
|
|
875
|
+
{
|
|
876
|
+
displayName: "Options",
|
|
877
|
+
name: "options",
|
|
878
|
+
type: "collection",
|
|
879
|
+
placeholder: "Add Option",
|
|
880
|
+
default: {},
|
|
881
|
+
options: [
|
|
882
|
+
{
|
|
883
|
+
displayName: "Temperature",
|
|
884
|
+
name: "temperature",
|
|
885
|
+
type: "number",
|
|
886
|
+
typeOptions: {
|
|
887
|
+
minValue: 0,
|
|
888
|
+
maxValue: 2,
|
|
889
|
+
numberStepSize: 0.1
|
|
890
|
+
},
|
|
891
|
+
default: 0.7,
|
|
892
|
+
description: "Controls randomness: Lower = more focused and deterministic"
|
|
893
|
+
},
|
|
894
|
+
{
|
|
895
|
+
displayName: "Max Tokens",
|
|
896
|
+
name: "maxTokens",
|
|
897
|
+
type: "number",
|
|
898
|
+
typeOptions: {
|
|
899
|
+
minValue: 1
|
|
900
|
+
},
|
|
901
|
+
default: 2048,
|
|
902
|
+
description: "Maximum number of tokens to generate"
|
|
903
|
+
},
|
|
904
|
+
{
|
|
905
|
+
displayName: "Top P",
|
|
906
|
+
name: "topP",
|
|
907
|
+
type: "number",
|
|
908
|
+
typeOptions: {
|
|
909
|
+
minValue: 0,
|
|
910
|
+
maxValue: 1,
|
|
911
|
+
numberStepSize: 0.1
|
|
912
|
+
},
|
|
913
|
+
default: 1,
|
|
914
|
+
description: "Nucleus sampling: considers tokens with top_p probability mass"
|
|
915
|
+
},
|
|
916
|
+
{
|
|
917
|
+
displayName: "Frequency Penalty",
|
|
918
|
+
name: "frequencyPenalty",
|
|
919
|
+
type: "number",
|
|
920
|
+
typeOptions: {
|
|
921
|
+
minValue: -2,
|
|
922
|
+
maxValue: 2,
|
|
923
|
+
numberStepSize: 0.1
|
|
924
|
+
},
|
|
925
|
+
default: 0,
|
|
926
|
+
description: "Penalizes new tokens based on frequency in text so far"
|
|
927
|
+
},
|
|
928
|
+
{
|
|
929
|
+
displayName: "Presence Penalty",
|
|
930
|
+
name: "presencePenalty",
|
|
931
|
+
type: "number",
|
|
932
|
+
typeOptions: {
|
|
933
|
+
minValue: -2,
|
|
934
|
+
maxValue: 2,
|
|
935
|
+
numberStepSize: 0.1
|
|
936
|
+
},
|
|
937
|
+
default: 0,
|
|
938
|
+
description: "Penalizes new tokens based on presence in text so far"
|
|
939
|
+
},
|
|
940
|
+
{
|
|
941
|
+
displayName: "Timeout",
|
|
942
|
+
name: "timeout",
|
|
943
|
+
type: "number",
|
|
944
|
+
default: 6e4,
|
|
945
|
+
description: "Request timeout in milliseconds"
|
|
328
946
|
}
|
|
947
|
+
]
|
|
329
948
|
}
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
949
|
+
]
|
|
950
|
+
};
|
|
951
|
+
}
|
|
952
|
+
  static {
    // esbuild `keepNames` helper: restores the class's original `.name`
    // ("AgnicAILanguageModel") after bundling/minification so runtime
    // reflection and error messages show the real identifier.
    __name(this, "AgnicAILanguageModel");
  }
|
|
955
|
+
async supplyData(itemIndex) {
|
|
956
|
+
const authentication = this.getNodeParameter(
|
|
957
|
+
"authentication",
|
|
958
|
+
itemIndex
|
|
959
|
+
);
|
|
960
|
+
let apiKey;
|
|
961
|
+
try {
|
|
962
|
+
if (authentication === "oAuth2") {
|
|
963
|
+
const credentials = await this.getCredentials(
|
|
964
|
+
"agnicWalletOAuth2Api",
|
|
965
|
+
itemIndex
|
|
966
|
+
);
|
|
967
|
+
apiKey = credentials.oauthTokenData?.access_token;
|
|
968
|
+
if (!apiKey) {
|
|
969
|
+
throw new Error(
|
|
970
|
+
"OAuth2 access token not found. Please reconnect your AgnicWallet account."
|
|
971
|
+
);
|
|
333
972
|
}
|
|
334
|
-
|
|
335
|
-
const
|
|
336
|
-
|
|
337
|
-
|
|
973
|
+
} else {
|
|
974
|
+
const credentials = await this.getCredentials(
|
|
975
|
+
"agnicWalletApi",
|
|
976
|
+
itemIndex
|
|
977
|
+
);
|
|
978
|
+
apiKey = credentials.apiToken;
|
|
979
|
+
if (!apiKey) {
|
|
980
|
+
throw new Error(
|
|
981
|
+
"API Key not found. Please configure your AgnicWallet API credentials."
|
|
982
|
+
);
|
|
338
983
|
}
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
},
|
|
356
|
-
// Add our custom tracing callback for spinning indicator and AI Agent logging
|
|
357
|
-
callbacks: [new AgnicLlmTracing(this)],
|
|
358
|
-
});
|
|
359
|
-
// Return in the same format as n8n's built-in OpenAI Chat Model
|
|
360
|
-
return {
|
|
361
|
-
response: chatModel,
|
|
362
|
-
};
|
|
984
|
+
}
|
|
985
|
+
} catch (error) {
|
|
986
|
+
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
987
|
+
throw new import_n8n_workflow.NodeOperationError(
|
|
988
|
+
this.getNode(),
|
|
989
|
+
`Authentication failed: ${errorMsg}`,
|
|
990
|
+
{ itemIndex }
|
|
991
|
+
);
|
|
992
|
+
}
|
|
993
|
+
const model = this.getNodeParameter("model", itemIndex);
|
|
994
|
+
if (!model?.trim()) {
|
|
995
|
+
throw new import_n8n_workflow.NodeOperationError(
|
|
996
|
+
this.getNode(),
|
|
997
|
+
"Model must be specified. Select from dropdown or enter a custom OpenRouter model ID.",
|
|
998
|
+
{ itemIndex }
|
|
999
|
+
);
|
|
363
1000
|
}
|
|
364
|
-
}
|
|
365
|
-
|
|
1001
|
+
const options = this.getNodeParameter("options", itemIndex, {});
|
|
1002
|
+
const chatModel = new import_openai.ChatOpenAI({
|
|
1003
|
+
apiKey,
|
|
1004
|
+
model: model.trim(),
|
|
1005
|
+
temperature: options.temperature,
|
|
1006
|
+
maxTokens: options.maxTokens,
|
|
1007
|
+
topP: options.topP,
|
|
1008
|
+
frequencyPenalty: options.frequencyPenalty,
|
|
1009
|
+
presencePenalty: options.presencePenalty,
|
|
1010
|
+
timeout: options.timeout ?? 6e4,
|
|
1011
|
+
maxRetries: 2,
|
|
1012
|
+
configuration: {
|
|
1013
|
+
baseURL: "https://api.agnicpay.xyz/v1"
|
|
1014
|
+
},
|
|
1015
|
+
// Add our custom tracing callback for spinning indicator and AI Agent logging
|
|
1016
|
+
callbacks: [new AgnicLlmTracing(this)]
|
|
1017
|
+
});
|
|
1018
|
+
return {
|
|
1019
|
+
response: chatModel
|
|
1020
|
+
};
|
|
1021
|
+
}
|
|
1022
|
+
};
|
|
1023
|
+
// Annotate the CommonJS export names for ESM import in node:
// NOTE: the `0 &&` makes this dead code at runtime; it exists only so that
// Node's static CJS analysis can detect the named export and allow
// `import { AgnicAILanguageModel } from ...` — do not remove or "simplify".
0 && (module.exports = {
  AgnicAILanguageModel
});
|