n8n-nodes-agnicwallet 1.0.6 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/nodes/AgnicAI/AgnicAI.node.d.ts +5 -0
- package/dist/nodes/AgnicAI/AgnicAI.node.js +422 -0
- package/dist/nodes/AgnicAI/AgnicAI.png +0 -0
- package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.node.d.ts +12 -0
- package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.node.js +365 -0
- package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.png +0 -0
- package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.svg +6 -0
- package/dist/nodes/AgnicMCPTool/AgnicMCPTool.node.d.ts +22 -0
- package/dist/nodes/AgnicMCPTool/AgnicMCPTool.node.js +368 -0
- package/dist/nodes/AgnicMCPTool/AgnicMCPTool.png +0 -0
- package/dist/nodes/X402HttpRequest/X402HttpRequest.node.js +1 -0
- package/dist/nodes/X402HttpRequest/X402HttpRequest.png +0 -0
- package/dist/nodes/X402HttpRequest/X402HttpRequest.svg +19 -0
- package/package.json +19 -3
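Before the per-file diffs, a quick orientation: the new AgnicAI node added in this release is a thin client for AgnicPay's OpenAI-compatible chat-completions endpoint. The sketch below reproduces the request it assembles, based only on the URL, headers, and body fields visible in the added code; calling the gateway directly like this outside n8n is an assumption, and the model ID, prompt, and token variable are placeholders.

    // Minimal sketch of the request AgnicAI.node.js builds (assumes the gateway
    // accepts the same payload outside n8n when given a valid AgnicWallet token).
    const response = await fetch("https://api.agnicpay.xyz/v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${process.env.AGNIC_API_TOKEN}`, // API key or OAuth2 access token
      },
      body: JSON.stringify({
        model: "openai/gpt-4o-mini", // any OpenRouter model ID
        messages: [{ role: "user", content: "Hello" }],
        temperature: 1, // optional, mirrors the node's Options collection
      }),
    });
    const data = await response.json();
    console.log(data.choices?.[0]?.message?.content); // what the node surfaces as `content`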
package/dist/nodes/AgnicAI/AgnicAI.node.js
ADDED
@@ -0,0 +1,422 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AgnicAI = void 0;
+const n8n_workflow_1 = require("n8n-workflow");
+class AgnicAI {
+    constructor() {
+        this.description = {
+            displayName: "AgnicAI",
+            name: "agnicAI",
+            group: ["transform"],
+            version: 1.0,
+            description: "Access various language models through AgnicPay AI Gateway with X402 payment support. Use this node in regular workflows to call AI models.",
+            defaults: {
+                name: "AgnicAI",
+            },
+            icon: "file:AgnicAI.png",
+            inputs: [n8n_workflow_1.NodeConnectionTypes.Main],
+            outputs: [n8n_workflow_1.NodeConnectionTypes.Main],
+            credentials: [
+                {
+                    name: "agnicWalletOAuth2Api",
+                    required: false,
+                    displayOptions: {
+                        show: {
+                            authentication: ["oAuth2"],
+                        },
+                    },
+                },
+                {
+                    name: "agnicWalletApi",
+                    required: false,
+                    displayOptions: {
+                        show: {
+                            authentication: ["apiKey"],
+                        },
+                    },
+                },
+            ],
+            properties: [
+                {
+                    displayName: "Authentication",
+                    name: "authentication",
+                    type: "options",
+                    options: [
+                        {
+                            name: "OAuth2",
+                            value: "oAuth2",
+                            description: "Recommended: Connect your account",
+                        },
+                        {
+                            name: "API Key",
+                            value: "apiKey",
+                            description: "For CI/CD or programmatic access",
+                        },
+                    ],
+                    default: "oAuth2",
+                    description: "How to authenticate with AgnicWallet",
+                },
+                {
+                    displayName: "Model",
+                    name: "model",
+                    type: "options",
+                    typeOptions: {
+                        allowCustomValues: true,
+                    },
+                    options: [
+                        {
+                            name: "Simple (GPT-4o Mini)",
+                            value: "openai/gpt-4o-mini",
+                            description: "Simple and fast default option - GPT-4o Mini",
+                        },
+                        {
+                            name: "OpenAI - GPT-4o",
+                            value: "openai/gpt-4o",
+                            description: "OpenAI's latest GPT-4o model",
+                        },
+                        {
+                            name: "OpenAI - GPT-4o Mini",
+                            value: "openai/gpt-4o-mini",
+                            description: "Fast and efficient GPT-4o Mini",
+                        },
+                        {
+                            name: "OpenAI - GPT-4 Turbo",
+                            value: "openai/gpt-4-turbo",
+                            description: "GPT-4 Turbo with extended context",
+                        },
+                        {
+                            name: "OpenAI - GPT-4",
+                            value: "openai/gpt-4",
+                            description: "OpenAI GPT-4",
+                        },
+                        {
+                            name: "OpenAI - GPT-3.5 Turbo",
+                            value: "openai/gpt-3.5-turbo",
+                            description: "Fast GPT-3.5 Turbo model",
+                        },
+                        {
+                            name: "Anthropic - Claude 3.5 Sonnet",
+                            value: "anthropic/claude-3.5-sonnet",
+                            description: "Anthropic's Claude 3.5 Sonnet",
+                        },
+                        {
+                            name: "Anthropic - Claude 3 Opus",
+                            value: "anthropic/claude-3-opus",
+                            description: "Anthropic's Claude 3 Opus",
+                        },
+                        {
+                            name: "Anthropic - Claude 3 Sonnet",
+                            value: "anthropic/claude-3-sonnet",
+                            description: "Anthropic's Claude 3 Sonnet",
+                        },
+                        {
+                            name: "Anthropic - Claude 3 Haiku",
+                            value: "anthropic/claude-3-haiku",
+                            description: "Fast Claude 3 Haiku model",
+                        },
+                        {
+                            name: "Google - Gemini Pro 1.5",
+                            value: "google/gemini-pro-1.5",
+                            description: "Google Gemini Pro 1.5",
+                        },
+                        {
+                            name: "Google - Gemini Pro",
+                            value: "google/gemini-pro",
+                            description: "Google Gemini Pro",
+                        },
+                        {
+                            name: "Google - Gemini Flash 1.5",
+                            value: "google/gemini-flash-1.5",
+                            description: "Fast Gemini Flash 1.5",
+                        },
+                        {
+                            name: "Meta - Llama 3.1 405B",
+                            value: "meta-llama/llama-3.1-405b-instruct",
+                            description: "Meta Llama 3.1 405B Instruct",
+                        },
+                        {
+                            name: "Meta - Llama 3.1 70B",
+                            value: "meta-llama/llama-3.1-70b-instruct",
+                            description: "Meta Llama 3.1 70B Instruct",
+                        },
+                        {
+                            name: "Meta - Llama 3 70B",
+                            value: "meta-llama/llama-3-70b-instruct",
+                            description: "Meta Llama 3 70B Instruct",
+                        },
+                        {
+                            name: "Mistral AI - Mistral Large",
+                            value: "mistralai/mistral-large",
+                            description: "Mistral AI Large model",
+                        },
+                        {
+                            name: "Mistral AI - Mixtral 8x7B",
+                            value: "mistralai/mixtral-8x7b-instruct",
+                            description: "Mistral Mixtral 8x7B Instruct",
+                        },
+                        {
+                            name: "Mistral AI - Mistral 7B",
+                            value: "mistralai/mistral-7b-instruct",
+                            description: "Mistral 7B Instruct",
+                        },
+                        {
+                            name: "Cohere - Command R+",
+                            value: "cohere/command-r-plus",
+                            description: "Cohere Command R+",
+                        },
+                        {
+                            name: "Perplexity - Sonar",
+                            value: "perplexity/sonar",
+                            description: "Perplexity Sonar model",
+                        },
+                        {
+                            name: "xAI - Grok Beta",
+                            value: "x-ai/grok-beta",
+                            description: "xAI Grok Beta",
+                        },
+                    ],
+                    default: "openai/gpt-4o-mini",
+                    description: "Select a model or type any OpenRouter model ID. See https://openrouter.ai/models for all available models. Examples: 'openai/gpt-4o', 'anthropic/claude-3.5-sonnet', 'google/gemini-pro-1.5'",
+                },
+                {
+                    displayName: "Messages",
+                    name: "messages",
+                    type: "fixedCollection",
+                    typeOptions: {
+                        multipleValues: true,
+                    },
+                    default: {},
+                    options: [
+                        {
+                            name: "message",
+                            displayName: "Message",
+                            values: [
+                                {
+                                    displayName: "Role",
+                                    name: "role",
+                                    type: "options",
+                                    options: [
+                                        {
+                                            name: "System",
+                                            value: "system",
+                                            description: "System message to set behavior",
+                                        },
+                                        {
+                                            name: "User",
+                                            value: "user",
+                                            description: "User message",
+                                        },
+                                        {
+                                            name: "Assistant",
+                                            value: "assistant",
+                                            description: "Assistant message (for conversation history)",
+                                        },
+                                    ],
+                                    default: "user",
+                                    description: "The role of the message",
+                                },
+                                {
+                                    displayName: "Content",
+                                    name: "content",
+                                    type: "string",
+                                    default: "",
+                                    typeOptions: {
+                                        rows: 4,
+                                    },
+                                    description: "The content of the message",
+                                },
+                            ],
+                        },
+                    ],
+                    description: "The messages to send to the model",
+                },
+                {
+                    displayName: "Options",
+                    name: "options",
+                    type: "collection",
+                    placeholder: "Add Option",
+                    default: {},
+                    options: [
+                        {
+                            displayName: "Temperature",
+                            name: "temperature",
+                            type: "number",
+                            typeOptions: {
+                                minValue: 0,
+                                maxValue: 2,
+                                numberStepSize: 0.1,
+                            },
+                            default: 1,
+                            description: "Controls randomness. Lower values make output more deterministic",
+                        },
+                        {
+                            displayName: "Max Tokens",
+                            name: "max_tokens",
+                            type: "number",
+                            typeOptions: {
+                                minValue: 1,
+                            },
+                            default: 2048,
+                            description: "Maximum number of tokens to generate",
+                        },
+                        {
+                            displayName: "Top P",
+                            name: "top_p",
+                            type: "number",
+                            typeOptions: {
+                                minValue: 0,
+                                maxValue: 1,
+                                numberStepSize: 0.1,
+                            },
+                            default: 1,
+                            description: "Nucleus sampling: consider tokens with top_p probability mass",
+                        },
+                        {
+                            displayName: "Frequency Penalty",
+                            name: "frequency_penalty",
+                            type: "number",
+                            typeOptions: {
+                                minValue: -2,
+                                maxValue: 2,
+                                numberStepSize: 0.1,
+                            },
+                            default: 0,
+                            description: "Penalize tokens based on their frequency in the text so far",
+                        },
+                        {
+                            displayName: "Presence Penalty",
+                            name: "presence_penalty",
+                            type: "number",
+                            typeOptions: {
+                                minValue: -2,
+                                maxValue: 2,
+                                numberStepSize: 0.1,
+                            },
+                            default: 0,
+                            description: "Penalize tokens based on whether they appear in the text so far",
+                        },
+                    ],
+                    description: "Additional options for the chat completion",
+                },
+            ],
+        };
+    }
+    async execute() {
+        var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;
+        const items = this.getInputData();
+        const returnData = [];
+        for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+            try {
+                // Get authentication type
+                const authentication = this.getNodeParameter("authentication", itemIndex);
+                // Get authentication header
+                let authHeader;
+                if (authentication === "oAuth2") {
+                    // OAuth2 authentication
+                    const credentials = (await this.getCredentials("agnicWalletOAuth2Api", itemIndex));
+                    authHeader = `Bearer ${String(credentials.oauthTokenData.access_token)}`;
+                }
+                else {
+                    // API Key authentication
+                    const credentials = await this.getCredentials("agnicWalletApi", itemIndex);
+                    const { apiToken } = credentials;
+                    authHeader = `Bearer ${String(apiToken)}`;
+                }
+                // Get model parameter (supports both dropdown selection and custom input)
+                const modelParam = this.getNodeParameter("model", itemIndex);
+                const model = modelParam === null || modelParam === void 0 ? void 0 : modelParam.trim();
+                if (!model || model === "") {
+                    throw new n8n_workflow_1.NodeOperationError(this.getNode(), "Model must be specified. Enter an OpenRouter model ID (e.g., 'openai/gpt-4o' or 'anthropic/claude-3.5-sonnet'). See https://openrouter.ai/models for all available models.", { itemIndex });
+                }
+                // Get messages
+                const messagesConfig = this.getNodeParameter("messages", itemIndex, {});
+                if (!messagesConfig.message || messagesConfig.message.length === 0) {
+                    throw new n8n_workflow_1.NodeOperationError(this.getNode(), "At least one message is required", { itemIndex });
+                }
+                const messages = messagesConfig.message.map((msg) => ({
+                    role: msg.role,
+                    content: msg.content,
+                }));
+                // Get options
+                const options = this.getNodeParameter("options", itemIndex, {});
+                // Build request body
+                const requestBody = {
+                    model: model.trim(),
+                    messages,
+                };
+                if (options.temperature !== undefined) {
+                    requestBody.temperature = options.temperature;
+                }
+                if (options.max_tokens !== undefined) {
+                    requestBody.max_tokens = options.max_tokens;
+                }
+                if (options.top_p !== undefined) {
+                    requestBody.top_p = options.top_p;
+                }
+                if (options.frequency_penalty !== undefined) {
+                    requestBody.frequency_penalty = options.frequency_penalty;
+                }
+                if (options.presence_penalty !== undefined) {
+                    requestBody.presence_penalty = options.presence_penalty;
+                }
+                // Make request to AgnicPay AI Gateway
+                const apiUrl = "https://api.agnicpay.xyz/v1/chat/completions";
+                (_a = this.logger) === null || _a === void 0 ? void 0 : _a.info(`[AgnicAI] Calling AgnicPay AI Gateway with model: ${model}`);
+                const response = await this.helpers.httpRequest({
+                    method: "POST",
+                    url: apiUrl,
+                    headers: {
+                        "Content-Type": "application/json",
+                        Authorization: authHeader,
+                    },
+                    body: requestBody,
+                    json: true,
+                });
+                // Format response
+                const formattedResponse = {
+                    ...response,
+                    content: ((_d = (_c = (_b = response.choices) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c.message) === null || _d === void 0 ? void 0 : _d.content) ||
+                        ((_f = (_e = response.choices) === null || _e === void 0 ? void 0 : _e[0]) === null || _f === void 0 ? void 0 : _f.text) ||
+                        response.content,
+                    role: ((_j = (_h = (_g = response.choices) === null || _g === void 0 ? void 0 : _g[0]) === null || _h === void 0 ? void 0 : _h.message) === null || _j === void 0 ? void 0 : _j.role) || "assistant",
+                };
+                returnData.push({
+                    json: formattedResponse,
+                    pairedItem: {
+                        item: itemIndex,
+                    },
+                });
+            }
+            catch (error) {
+                const errorMessage = error instanceof Error ? error.message : "Unknown error occurred";
+                // Extract more detailed error information if available
+                let detailedError = errorMessage;
+                if (error && typeof error === "object" && "response" in error) {
+                    const responseError = error;
+                    if ((_k = responseError.response) === null || _k === void 0 ? void 0 : _k.body) {
+                        detailedError = JSON.stringify(responseError.response.body);
+                    }
+                    else if ((_l = responseError.response) === null || _l === void 0 ? void 0 : _l.statusCode) {
+                        detailedError = `HTTP ${responseError.response.statusCode}: ${errorMessage}`;
+                    }
+                }
+                if (this.continueOnFail()) {
+                    returnData.push({
+                        json: {
+                            error: detailedError,
+                        },
+                        pairedItem: {
+                            item: itemIndex,
+                        },
+                    });
+                    continue;
+                }
+                throw new n8n_workflow_1.NodeOperationError(this.getNode(), detailedError, {
+                    itemIndex,
+                });
+            }
+        }
+        return [returnData];
+    }
+}
+exports.AgnicAI = AgnicAI;

package/dist/nodes/AgnicAI/AgnicAI.png
Binary file
package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.node.d.ts
ADDED
@@ -0,0 +1,12 @@
+import { INodeType, INodeTypeDescription, ISupplyDataFunctions, SupplyData } from "n8n-workflow";
+/**
+ * AgnicAI Chat Model Node for n8n
+ *
+ * Uses LangChain's ChatOpenAI class with AgnicPay's OpenAI-compatible endpoint.
+ * This approach is identical to how n8n's built-in OpenAI Chat Model works,
+ * just pointing to AgnicPay's AI Gateway instead.
+ */
+export declare class AgnicAILanguageModel implements INodeType {
+    description: INodeTypeDescription;
+    supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData>;
+}
package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.node.js
ADDED
@@ -0,0 +1,365 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AgnicAILanguageModel = void 0;
+const n8n_workflow_1 = require("n8n-workflow");
+const openai_1 = require("@langchain/openai");
+const base_1 = require("@langchain/core/callbacks/base");
+/**
+ * Custom LLM Tracing callback for AgnicAI
+ * This enables the spinning indicator and AI Agent logging
+ * Mirrors n8n's internal N8nLlmTracing implementation
+ */
+class AgnicLlmTracing extends base_1.BaseCallbackHandler {
+    constructor(executionFunctions) {
+        super();
+        this.name = "AgnicLlmTracing";
+        // This flag makes LangChain wait for handlers before continuing
+        this.awaitHandlers = true;
+        this.connectionType = n8n_workflow_1.NodeConnectionTypes.AiLanguageModel;
+        this.runsMap = {};
+        this.executionFunctions = executionFunctions;
+    }
+    async handleLLMStart(llm, prompts, runId) {
+        const options = llm.kwargs || llm;
+        // Add input data to n8n's execution context
+        // This triggers the spinning indicator
+        const { index } = this.executionFunctions.addInputData(this.connectionType, [[{ json: { messages: prompts, options } }]]);
+        this.runsMap[runId] = {
+            index,
+            options,
+            messages: prompts,
+        };
+        // Log AI event for the AI Agent's log panel
+        this.logAiEvent("ai-llm-generated-output-started", {
+            messages: prompts,
+            options,
+        });
+    }
+    async handleLLMEnd(output, runId) {
+        var _a;
+        const runDetails = (_a = this.runsMap[runId]) !== null && _a !== void 0 ? _a : { index: 0 };
+        // Parse the response
+        const generations = output.generations.map((gen) => gen.map((g) => ({ text: g.text, generationInfo: g.generationInfo })));
+        const response = {
+            generations,
+            llmOutput: output.llmOutput,
+        };
+        // Add output data to n8n's execution context
+        // This stops the spinning indicator and shows success
+        this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [[{ json: response }]]);
+        // Log AI event for the AI Agent's log panel
+        this.logAiEvent("ai-llm-generated-output", {
+            messages: runDetails.messages,
+            options: runDetails.options,
+            response,
+        });
+    }
+    async handleLLMError(error, runId) {
+        var _a;
+        const runDetails = (_a = this.runsMap[runId]) !== null && _a !== void 0 ? _a : { index: 0 };
+        // Add error output
+        this.executionFunctions.addOutputData(this.connectionType, runDetails.index, new n8n_workflow_1.NodeOperationError(this.executionFunctions.getNode(), error, {
+            functionality: "configuration-node",
+        }));
+        // Log AI error event
+        this.logAiEvent("ai-llm-errored", {
+            error: error.message || String(error),
+            runId,
+        });
+    }
+    logAiEvent(event, data) {
+        var _a, _b;
+        try {
+            (_b = (_a = this.executionFunctions).logAiEvent) === null || _b === void 0 ? void 0 : _b.call(_a, event, data ? (0, n8n_workflow_1.jsonStringify)(data) : undefined);
+        }
+        catch {
+            // Silently ignore if logAiEvent is not available
+        }
+    }
+}
+/**
+ * AgnicAI Chat Model Node for n8n
+ *
+ * Uses LangChain's ChatOpenAI class with AgnicPay's OpenAI-compatible endpoint.
+ * This approach is identical to how n8n's built-in OpenAI Chat Model works,
+ * just pointing to AgnicPay's AI Gateway instead.
+ */
+class AgnicAILanguageModel {
+    constructor() {
+        this.description = {
+            displayName: "AgnicAI Chat Model",
+            name: "lmChatAgnicAI",
+            icon: "file:AgnicAILanguageModel.png",
+            group: ["transform"],
+            version: [1, 1.1],
+            description: "Chat model using AgnicPay AI Gateway with X402 payment support",
+            defaults: {
+                name: "AgnicAI Chat Model",
+            },
+            codex: {
+                categories: ["AI"],
+                subcategories: {
+                    AI: ["Language Models", "Root Nodes"],
+                    "Language Models": ["Chat Models (Recommended)"],
+                },
+                resources: {
+                    primaryDocumentation: [
+                        {
+                            url: "https://www.agnicpay.xyz/ai-gateway",
+                        },
+                    ],
+                },
+            },
+            inputs: [],
+            outputs: [n8n_workflow_1.NodeConnectionTypes.AiLanguageModel],
+            outputNames: ["Model"],
+            credentials: [
+                {
+                    name: "agnicWalletOAuth2Api",
+                    required: false,
+                    displayOptions: {
+                        show: {
+                            authentication: ["oAuth2"],
+                        },
+                    },
+                },
+                {
+                    name: "agnicWalletApi",
+                    required: false,
+                    displayOptions: {
+                        show: {
+                            authentication: ["apiKey"],
+                        },
+                    },
+                },
+            ],
+            properties: [
+                {
+                    displayName: "Authentication",
+                    name: "authentication",
+                    type: "options",
+                    options: [
+                        {
+                            name: "OAuth2",
+                            value: "oAuth2",
+                            description: "Recommended: Connect your account",
+                        },
+                        {
+                            name: "API Key",
+                            value: "apiKey",
+                            description: "For CI/CD or programmatic access",
+                        },
+                    ],
+                    default: "apiKey",
+                    description: "How to authenticate with AgnicWallet",
+                },
+                {
+                    displayName: "Model",
+                    name: "model",
+                    type: "options",
+                    typeOptions: {
+                        allowCustomValues: true,
+                    },
+                    options: [
+                        {
+                            name: "GPT-4o Mini (Fast & Affordable)",
+                            value: "openai/gpt-4o-mini",
+                        },
+                        {
+                            name: "GPT-4o (Best Quality)",
+                            value: "openai/gpt-4o",
+                        },
+                        {
+                            name: "GPT-4 Turbo",
+                            value: "openai/gpt-4-turbo",
+                        },
+                        {
+                            name: "GPT-3.5 Turbo",
+                            value: "openai/gpt-3.5-turbo",
+                        },
+                        {
+                            name: "Claude 3.5 Sonnet",
+                            value: "anthropic/claude-3.5-sonnet",
+                        },
+                        {
+                            name: "Claude 3 Opus",
+                            value: "anthropic/claude-3-opus",
+                        },
+                        {
+                            name: "Claude 3 Haiku",
+                            value: "anthropic/claude-3-haiku",
+                        },
+                        {
+                            name: "Gemini Pro 1.5",
+                            value: "google/gemini-pro-1.5",
+                        },
+                        {
+                            name: "Gemini Flash 1.5",
+                            value: "google/gemini-flash-1.5",
+                        },
+                        {
+                            name: "Llama 3.1 70B",
+                            value: "meta-llama/llama-3.1-70b-instruct",
+                        },
+                        {
+                            name: "Llama 3.1 8B",
+                            value: "meta-llama/llama-3.1-8b-instruct",
+                        },
+                        {
+                            name: "Mistral Large",
+                            value: "mistralai/mistral-large",
+                        },
+                        {
+                            name: "Mixtral 8x22B",
+                            value: "mistralai/mixtral-8x22b-instruct",
+                        },
+                        {
+                            name: "DeepSeek R1",
+                            value: "deepseek/deepseek-r1",
+                        },
+                        {
+                            name: "DeepSeek Chat",
+                            value: "deepseek/deepseek-chat",
+                        },
+                        {
+                            name: "Qwen 2.5 72B",
+                            value: "qwen/qwen-2.5-72b-instruct",
+                        },
+                    ],
+                    default: "openai/gpt-4o-mini",
+                    description: "Select a model or type a custom OpenRouter model ID. See https://openrouter.ai/models for all available models.",
+                },
+                {
+                    displayName: "Options",
+                    name: "options",
+                    type: "collection",
+                    placeholder: "Add Option",
+                    default: {},
+                    options: [
+                        {
+                            displayName: "Temperature",
+                            name: "temperature",
+                            type: "number",
+                            typeOptions: {
+                                minValue: 0,
+                                maxValue: 2,
+                                numberStepSize: 0.1,
+                            },
+                            default: 0.7,
+                            description: "Controls randomness: Lower = more focused and deterministic",
+                        },
+                        {
+                            displayName: "Max Tokens",
+                            name: "maxTokens",
+                            type: "number",
+                            typeOptions: {
+                                minValue: 1,
+                            },
+                            default: 2048,
+                            description: "Maximum number of tokens to generate",
+                        },
+                        {
+                            displayName: "Top P",
+                            name: "topP",
+                            type: "number",
+                            typeOptions: {
+                                minValue: 0,
+                                maxValue: 1,
+                                numberStepSize: 0.1,
+                            },
+                            default: 1,
+                            description: "Nucleus sampling: considers tokens with top_p probability mass",
+                        },
+                        {
+                            displayName: "Frequency Penalty",
+                            name: "frequencyPenalty",
+                            type: "number",
+                            typeOptions: {
+                                minValue: -2,
+                                maxValue: 2,
+                                numberStepSize: 0.1,
+                            },
+                            default: 0,
+                            description: "Penalizes new tokens based on frequency in text so far",
+                        },
+                        {
+                            displayName: "Presence Penalty",
+                            name: "presencePenalty",
+                            type: "number",
+                            typeOptions: {
+                                minValue: -2,
+                                maxValue: 2,
+                                numberStepSize: 0.1,
+                            },
+                            default: 0,
+                            description: "Penalizes new tokens based on presence in text so far",
+                        },
+                        {
+                            displayName: "Timeout",
+                            name: "timeout",
+                            type: "number",
+                            default: 60000,
+                            description: "Request timeout in milliseconds",
+                        },
+                    ],
+                },
+            ],
+        };
+    }
+    async supplyData(itemIndex) {
+        var _a, _b;
+        // Get authentication type and credentials
+        const authentication = this.getNodeParameter("authentication", itemIndex);
+        let apiKey;
+        try {
+            if (authentication === "oAuth2") {
+                const credentials = (await this.getCredentials("agnicWalletOAuth2Api", itemIndex));
+                apiKey = (_a = credentials.oauthTokenData) === null || _a === void 0 ? void 0 : _a.access_token;
+                if (!apiKey) {
+                    throw new Error("OAuth2 access token not found. Please reconnect your AgnicWallet account.");
+                }
+            }
+            else {
+                const credentials = await this.getCredentials("agnicWalletApi", itemIndex);
+                apiKey = credentials.apiToken;
+                if (!apiKey) {
+                    throw new Error("API Key not found. Please configure your AgnicWallet API credentials.");
+                }
+            }
+        }
+        catch (error) {
+            const errorMsg = error instanceof Error ? error.message : String(error);
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Authentication failed: ${errorMsg}`, { itemIndex });
+        }
+        // Get model parameter
+        const model = this.getNodeParameter("model", itemIndex);
+        if (!(model === null || model === void 0 ? void 0 : model.trim())) {
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), "Model must be specified. Select from dropdown or enter a custom OpenRouter model ID.", { itemIndex });
+        }
+        // Get options
+        const options = this.getNodeParameter("options", itemIndex, {});
+        // Create ChatOpenAI instance pointing to AgnicPay's endpoint
+        // Pass our custom tracing callback to enable spinning indicator and logging
+        const chatModel = new openai_1.ChatOpenAI({
+            apiKey,
+            model: model.trim(),
+            temperature: options.temperature,
+            maxTokens: options.maxTokens,
+            topP: options.topP,
+            frequencyPenalty: options.frequencyPenalty,
+            presencePenalty: options.presencePenalty,
+            timeout: (_b = options.timeout) !== null && _b !== void 0 ? _b : 60000,
+            maxRetries: 2,
+            configuration: {
+                baseURL: "https://api.agnicpay.xyz/v1",
+            },
+            // Add our custom tracing callback for spinning indicator and AI Agent logging
+            callbacks: [new AgnicLlmTracing(this)],
+        });
+        // Return in the same format as n8n's built-in OpenAI Chat Model
+        return {
+            response: chatModel,
+        };
+    }
+}
+exports.AgnicAILanguageModel = AgnicAILanguageModel;

package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.png
Binary file
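The AgnicAI Chat Model node above is essentially LangChain's ChatOpenAI pointed at the AgnicPay gateway via configuration.baseURL. As a rough standalone equivalent of what supplyData() constructs (minus the n8n tracing callback), assuming a valid AgnicWallet token in an environment variable and using the node's default option values:

    import { ChatOpenAI } from "@langchain/openai";

    // Sketch only: mirrors the ChatOpenAI construction in AgnicAILanguageModel.node.js.
    const llm = new ChatOpenAI({
      apiKey: process.env.AGNIC_API_TOKEN, // AgnicWallet API key or OAuth2 access token (assumed)
      model: "openai/gpt-4o-mini",
      temperature: 0.7,
      timeout: 60000,
      maxRetries: 2,
      configuration: { baseURL: "https://api.agnicpay.xyz/v1" }, // AgnicPay's OpenAI-compatible gateway
    });

    const reply = await llm.invoke("Summarize what an X402 payment is in one sentence.");
    console.log(reply.content);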
package/dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.svg
ADDED
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 60 60">
+  <rect width="60" height="60" fill="none"/>
+  <path d="M30 10 L50 25 L50 35 L30 50 L10 35 L10 25 Z" fill="#6366f1" stroke="#4f46e5" stroke-width="2"/>
+  <circle cx="30" cy="30" r="8" fill="#ffffff"/>
+  <path d="M25 30 L28 33 L35 26" stroke="#6366f1" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
+</svg>
package/dist/nodes/AgnicMCPTool/AgnicMCPTool.node.d.ts
ADDED
@@ -0,0 +1,22 @@
+import { INodeType, INodeTypeDescription, ISupplyDataFunctions, IExecuteFunctions, SupplyData, INodeExecutionData } from "n8n-workflow";
+/**
+ * AgnicMCPTool - MCP Client for AgnicPay
+ *
+ * This is a supply-only AI tool node that connects to the AgnicPay MCP server
+ * and provides X402 payment tools to AI Agents via the MCP protocol.
+ *
+ * This node cannot be executed directly - it only supplies tools to AI Agents.
+ */
+export declare class AgnicMCPTool implements INodeType {
+    description: INodeTypeDescription;
+    /**
+     * Execute method for direct tool invocation.
+     * This is called when input data is passed directly to this node.
+     */
+    execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
+    /**
+     * Supply MCP tools to AI Agent.
+     * This is the main method that provides tools to the AI Agent.
+     */
+    supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData>;
+}
package/dist/nodes/AgnicMCPTool/AgnicMCPTool.node.js
ADDED
@@ -0,0 +1,368 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AgnicMCPTool = void 0;
+const n8n_workflow_1 = require("n8n-workflow");
+const index_js_1 = require("@modelcontextprotocol/sdk/client/index.js");
+const streamableHttp_js_1 = require("@modelcontextprotocol/sdk/client/streamableHttp.js");
+const tools_1 = require("@langchain/core/tools");
+const agents_1 = require("langchain/agents");
+const zod_1 = require("zod");
+const json_schema_to_zod_1 = require("@n8n/json-schema-to-zod");
+/**
+ * Toolkit class that wraps MCP tools for n8n AI Agent
+ * Extends Toolkit from langchain/agents for proper serialization
+ */
+class AgnicMcpToolkit extends agents_1.Toolkit {
+    constructor(tools) {
+        super();
+        this.tools = tools;
+    }
+}
+// Pre-configured AgnicPay MCP endpoint (uses HTTP Streamable transport)
+const AGNIC_MCP_ENDPOINT = "https://mcp.agnicpay.xyz/sse";
+/**
+ * Convert JSON Schema to Zod schema using n8n's library
+ * Returns actual Zod schema objects (not strings)
+ */
+function convertJsonSchemaToZod(schema) {
+    if (!schema || typeof schema !== "object") {
+        return zod_1.z.object({});
+    }
+    try {
+        // @n8n/json-schema-to-zod returns actual Zod objects, not strings
+        const zodSchema = (0, json_schema_to_zod_1.jsonSchemaToZod)(schema);
+        // Ensure we return an object schema for structured tools
+        if (zodSchema instanceof zod_1.z.ZodObject) {
+            return zodSchema;
+        }
+        // Wrap non-object schemas in an object
+        return zod_1.z.object({ value: zodSchema });
+    }
+    catch {
+        // Fallback to empty object schema if conversion fails
+        return zod_1.z.object({});
+    }
+}
+/**
+ * Convert an MCP tool to a LangChain DynamicStructuredTool
+ */
+function mcpToolToDynamicTool(tool, callTool) {
+    // Convert JSON Schema to Zod schema using proper library
+    const zodSchema = convertJsonSchemaToZod(tool.inputSchema);
+    // Use type assertion to avoid deep type instantiation issues with DynamicStructuredTool
+    const toolConfig = {
+        name: tool.name,
+        description: tool.description || `MCP tool: ${tool.name}`,
+        schema: zodSchema,
+        func: async (input) => {
+            try {
+                const result = await callTool(tool.name, input);
+                if (typeof result === "string") {
+                    return result;
+                }
+                return JSON.stringify(result);
+            }
+            catch (error) {
+                const errorMessage = error instanceof Error ? error.message : String(error);
+                return `Error calling ${tool.name}: ${errorMessage}`;
+            }
+        },
+        // Required metadata for proper tool serialization in n8n
+        metadata: { isFromToolkit: true },
+    };
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    return new tools_1.DynamicStructuredTool(toolConfig);
+}
+/**
+ * AgnicMCPTool - MCP Client for AgnicPay
+ *
+ * This is a supply-only AI tool node that connects to the AgnicPay MCP server
+ * and provides X402 payment tools to AI Agents via the MCP protocol.
+ *
+ * This node cannot be executed directly - it only supplies tools to AI Agents.
+ */
+class AgnicMCPTool {
+    constructor() {
+        this.description = {
+            displayName: "Agnic MCP Tool",
+            name: "agnicMcpTool",
+            icon: "file:AgnicMCPTool.png",
+            group: ["output"],
+            version: 1,
+            description: "MCP client for AgnicPay - X402 payment tools for AI Agents",
+            defaults: {
+                name: "Agnic MCP Tool",
+            },
+            // Supply-only AI tool configuration
+            inputs: [],
+            outputs: [{ type: n8n_workflow_1.NodeConnectionTypes.AiTool, displayName: "Tools" }],
+            codex: {
+                categories: ["AI"],
+                subcategories: {
+                    AI: ["Tools"],
+                },
+                resources: {
+                    primaryDocumentation: [
+                        {
+                            url: "https://www.agnicpay.xyz/mcp",
+                        },
+                    ],
+                },
+            },
+            credentials: [
+                {
+                    name: "agnicWalletOAuth2Api",
+                    required: false,
+                    displayOptions: {
+                        show: {
+                            authentication: ["oAuth2"],
+                        },
+                    },
+                },
+                {
+                    name: "agnicWalletApi",
+                    required: false,
+                    displayOptions: {
+                        show: {
+                            authentication: ["apiKey"],
+                        },
+                    },
+                },
+            ],
+            properties: [
+                {
+                    displayName: "Authentication",
+                    name: "authentication",
+                    type: "options",
+                    default: "apiKey",
+                    options: [
+                        {
+                            name: "OAuth2",
+                            value: "oAuth2",
+                            description: "Recommended: Connect your account",
+                        },
+                        {
+                            name: "API Key",
+                            value: "apiKey",
+                            description: "For CI/CD or programmatic access",
+                        },
+                    ],
+                    description: "How to authenticate with AgnicWallet",
+                },
+                {
+                    displayName: "Connects to AgnicPay MCP server. Tools are discovered automatically and include: make X402 API requests, check balance, view payment history, and discover APIs.",
+                    name: "notice",
+                    type: "notice",
+                    default: "",
+                },
+            ],
+        };
+    }
+    /**
+     * Execute method for direct tool invocation.
+     * This is called when input data is passed directly to this node.
+     */
+    async execute() {
+        var _a;
+        const node = this.getNode();
+        const items = this.getInputData();
+        const returnData = [];
+        // Get authentication
+        const authentication = this.getNodeParameter("authentication", 0);
+        let accessToken;
+        try {
+            if (authentication === "oAuth2") {
+                const creds = (await this.getCredentials("agnicWalletOAuth2Api"));
+                accessToken = (_a = creds === null || creds === void 0 ? void 0 : creds.oauthTokenData) === null || _a === void 0 ? void 0 : _a.access_token;
+            }
+            else {
+                const creds = (await this.getCredentials("agnicWalletApi"));
+                accessToken = creds === null || creds === void 0 ? void 0 : creds.apiToken;
+            }
+        }
+        catch {
+            throw new n8n_workflow_1.NodeOperationError(node, "Failed to load AgnicWallet credentials.");
+        }
+        if (!accessToken) {
+            throw new n8n_workflow_1.NodeOperationError(node, "Missing AgnicWallet authentication token.");
+        }
+        // Connect to MCP server
+        const transport = new streamableHttp_js_1.StreamableHTTPClientTransport(new URL(AGNIC_MCP_ENDPOINT), {
+            requestInit: { headers: { Authorization: `Bearer ${accessToken}` } },
+        });
+        const client = new index_js_1.Client({ name: "agnic-mcp-client", version: "1.0.0" }, { capabilities: {} });
+        try {
+            await client.connect(transport);
+            const toolsResult = await client.listTools();
+            const mcpTools = toolsResult.tools || [];
+            for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+                const item = items[itemIndex];
+                // Expect input to have a 'tool' property with the tool name
+                if (!item.json.tool || typeof item.json.tool !== "string") {
+                    throw new n8n_workflow_1.NodeOperationError(node, "Tool name not found in item.json.tool", { itemIndex });
+                }
+                const toolName = item.json.tool;
+                const matchingTool = mcpTools.find((t) => t.name === toolName);
+                if (!matchingTool) {
+                    throw new n8n_workflow_1.NodeOperationError(node, `Tool "${toolName}" not found`, {
+                        itemIndex,
+                    });
+                }
+                // Extract tool arguments (everything except 'tool' property)
+                const { tool: _, ...toolArguments } = item.json;
+                const result = await client.callTool({
+                    name: toolName,
+                    arguments: toolArguments,
+                });
+                // Extract text content from result
+                let responseContent = result;
+                if (result.content && Array.isArray(result.content)) {
+                    const textContent = result.content.find((c) => c.type === "text");
+                    if (textContent && "text" in textContent) {
+                        responseContent = textContent.text;
+                    }
+                }
+                returnData.push({
+                    json: { response: responseContent },
+                    pairedItem: { item: itemIndex },
+                });
+            }
+        }
+        finally {
+            try {
+                await client.close();
+            }
+            catch {
+                // Ignore cleanup errors
+            }
+            try {
+                await transport.close();
+            }
+            catch {
+                // Ignore cleanup errors
+            }
+        }
+        return [returnData];
+    }
+    /**
+     * Supply MCP tools to AI Agent.
+     * This is the main method that provides tools to the AI Agent.
+     */
+    async supplyData(itemIndex) {
+        var _a;
+        // ─────────────────────────────────────────────
+        // Authentication
+        // ─────────────────────────────────────────────
+        const authentication = this.getNodeParameter("authentication", itemIndex);
+        let accessToken;
+        try {
+            if (authentication === "oAuth2") {
+                const creds = (await this.getCredentials("agnicWalletOAuth2Api", itemIndex));
+                accessToken = (_a = creds === null || creds === void 0 ? void 0 : creds.oauthTokenData) === null || _a === void 0 ? void 0 : _a.access_token;
+            }
+            else {
+                const creds = (await this.getCredentials("agnicWalletApi", itemIndex));
+                accessToken = creds === null || creds === void 0 ? void 0 : creds.apiToken;
+            }
+        }
+        catch (err) {
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), "Failed to load AgnicWallet credentials. Please configure your credentials.", { itemIndex });
+        }
+        if (!accessToken) {
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), "Missing AgnicWallet authentication token. Please check your credentials configuration.", { itemIndex });
+        }
+        // ─────────────────────────────────────────────
+        // MCP Client Setup
+        // ─────────────────────────────────────────────
+        let client;
+        let transport;
+        try {
+            // Create HTTP Streamable transport with authentication
+            // This transport uses POST requests and accepts both JSON and SSE responses
+            transport = new streamableHttp_js_1.StreamableHTTPClientTransport(new URL(AGNIC_MCP_ENDPOINT), {
+                requestInit: {
+                    headers: {
+                        Authorization: `Bearer ${accessToken}`,
+                    },
+                },
+            });
+            // Create MCP client
+            client = new index_js_1.Client({ name: "agnic-mcp-client", version: "1.0.0" }, { capabilities: {} });
+            // Connect to MCP server
+            await client.connect(transport);
+            // ─────────────────────────────────────────────
+            // Discover and wrap MCP tools
+            // ─────────────────────────────────────────────
+            const toolsResult = await client.listTools();
+            const mcpTools = toolsResult.tools || [];
+            if (mcpTools.length === 0) {
+                throw new n8n_workflow_1.NodeOperationError(this.getNode(), "No tools available from AgnicPay MCP server. Please check your authentication and try again.", { itemIndex });
+            }
+            // Create a tool caller function
+            const callTool = async (name, args) => {
+                if (!client) {
+                    throw new Error("MCP client is not connected");
+                }
+                const result = await client.callTool({
+                    name,
+                    arguments: args,
+                });
+                // Extract content from the result
+                if (result.content && Array.isArray(result.content)) {
+                    const textContent = result.content.find((c) => c.type === "text");
+                    if (textContent && "text" in textContent) {
+                        return textContent.text;
+                    }
+                }
+                return result;
+            };
+            // Convert MCP tools to LangChain DynamicStructuredTools
+            const langchainTools = mcpTools.map((tool) => mcpToolToDynamicTool(tool, callTool));
+            // Wrap tools in a Toolkit for n8n AI Agent compatibility
+            const toolkit = new AgnicMcpToolkit(langchainTools);
+            // Store references for cleanup
+            const clientRef = client;
+            const transportRef = transport;
+            // Return toolkit with cleanup function
+            return {
+                response: toolkit,
+                closeFunction: async () => {
+                    try {
+                        await clientRef.close();
+                    }
+                    catch {
+                        // Ignore cleanup errors
+                    }
+                    try {
+                        await transportRef.close();
+                    }
+                    catch {
+                        // Ignore cleanup errors
+                    }
+                },
+            };
+        }
+        catch (error) {
+            // Clean up on error
+            if (client) {
+                try {
+                    await client.close();
+                }
+                catch {
+                    // Ignore cleanup errors
+                }
+            }
+            if (transport) {
+                try {
+                    await transport.close();
+                }
+                catch {
+                    // Ignore cleanup errors
+                }
+            }
+            const errorMessage = error instanceof Error ? error.message : String(error);
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Failed to connect to AgnicPay MCP server: ${errorMessage}`, { itemIndex });
+        }
+    }
+}
+exports.AgnicMCPTool = AgnicMCPTool;

package/dist/nodes/AgnicMCPTool/AgnicMCPTool.png
Binary file

package/dist/nodes/X402HttpRequest/X402HttpRequest.png
Binary file
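One usage note on the execute() path shown above: when items are piped directly into the Agnic MCP Tool node (instead of it supplying tools to an AI Agent), each input item must name the tool in item.json.tool, every other json field is forwarded as that tool's arguments, and the node emits { response } per item. A hedged illustration of that contract; the tool name and argument below are hypothetical, since the real tool list is discovered from the MCP server at runtime:

    // Hypothetical input item for direct execution (tool name/args are illustrative only).
    const inputItem = {
      json: {
        tool: "check_balance",   // must match a name returned by the server's listTools()
        walletAddress: "...",    // any remaining fields become the tool's arguments
      },
    };

    // Shape of the corresponding output item produced by execute():
    const outputItem = {
      json: { response: "...text content returned by the MCP tool..." },
      pairedItem: { item: 0 },
    };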
package/dist/nodes/X402HttpRequest/X402HttpRequest.svg
ADDED
@@ -0,0 +1,19 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
+ "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
+<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
+ width="44.000000pt" height="45.000000pt" viewBox="0 0 44.000000 45.000000"
+ preserveAspectRatio="xMidYMid meet">
+
+<g transform="translate(0.000000,45.000000) scale(0.100000,-0.100000)"
+fill="#000000" stroke="none">
+<path d="M175 380 l-39 -40 42 -43 42 -42 42 42 42 43 -39 40 c-21 22 -42 40
+-45 40 -3 0 -24 -18 -45 -40z"/>
+<path d="M55 260 l-39 -40 42 -43 42 -42 42 42 42 43 -39 40 c-21 22 -42 40
+-45 40 -3 0 -24 -18 -45 -40z"/>
+<path d="M295 260 l-39 -40 42 -43 42 -42 42 42 42 43 -39 40 c-21 22 -42 40
+-45 40 -3 0 -24 -18 -45 -40z"/>
+<path d="M175 140 l-39 -40 42 -43 42 -42 42 42 42 43 -39 40 c-21 22 -42 40
+-45 40 -3 0 -24 -18 -45 -40z"/>
+</g>
+</svg>
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "n8n-nodes-agnicwallet",
-  "version": "1.0.6",
+  "version": "1.0.7",
   "description": "n8n community node for AgnicWallet - automated Web3 payments for X402 APIs",
   "keywords": [
     "n8n-community-node-package",
@@ -15,7 +15,10 @@
     "automation",
     "workflow",
     "base",
-    "solana"
+    "solana",
+    "mcp",
+    "model-context-protocol",
+    "ai-agent"
   ],
   "license": "MIT",
   "homepage": "https://github.com/agnicpay/n8n-X402-AgnicWallet#readme",
@@ -48,11 +51,22 @@
       "dist/credentials/AgnicWalletOAuth2Api.credentials.js"
     ],
     "nodes": [
-      "dist/nodes/X402HttpRequest/X402HttpRequest.node.js"
+      "dist/nodes/X402HttpRequest/X402HttpRequest.node.js",
+      "dist/nodes/AgnicAILanguageModel/AgnicAILanguageModel.node.js",
+      "dist/nodes/AgnicAI/AgnicAI.node.js",
+      "dist/nodes/AgnicMCPTool/AgnicMCPTool.node.js"
     ]
   },
+  "dependencies": {
+    "@modelcontextprotocol/sdk": "^1.24.0",
+    "@n8n/json-schema-to-zod": "^1.6.0",
+    "zod": "^3.23.0"
+  },
   "devDependencies": {
+    "@langchain/core": "^0.3.68",
+    "@langchain/openai": "^0.6.16",
     "@types/node": "^20.10.0",
+    "langchain": "^0.3.33",
     "@typescript-eslint/parser": "^6.13.0",
     "eslint": "^8.54.0",
     "eslint-plugin-n8n-nodes-base": "^1.16.1",
@@ -62,6 +76,8 @@
     "typescript": "^5.3.0"
   },
   "peerDependencies": {
+    "@langchain/core": "*",
+    "langchain": "*",
     "n8n-workflow": "*"
   }
 }