n8n-nodes-agnicwallet 1.0.13 → 1.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -174,703 +174,11 @@ var AgnicAILanguageModel = class {
  name: "model",
  type: "options",
  typeOptions: {
+ loadOptionsMethod: "getModels",
  allowCustomValues: true
  },
- options: [
- // ===== RECOMMENDED MODELS =====
- {
- name: "\u2B50 OpenAI: GPT-4o Mini (Recommended)",
- value: "openai/gpt-4o-mini"
- },
- {
- name: "\u2B50 Anthropic: Claude 3.5 Sonnet (Recommended)",
- value: "anthropic/claude-3.5-sonnet"
- },
- {
- name: "\u2B50 Google: Gemini 2.0 Flash (Recommended)",
- value: "google/gemini-2.0-flash-001"
- },
- {
- name: "\u2B50 Meta: Llama 3.3 70B (Recommended)",
- value: "meta-llama/llama-3.3-70b-instruct"
- },
- {
- name: "\u2B50 DeepSeek: Chat V3 (Recommended - Affordable)",
- value: "deepseek/deepseek-chat"
- },
- // ===== OpenAI Models =====
- {
- name: "OpenAI: GPT-4.1",
- value: "openai/gpt-4.1"
- },
- {
- name: "OpenAI: GPT-4.1 Mini",
- value: "openai/gpt-4.1-mini"
- },
- {
- name: "OpenAI: GPT-4.1 Nano",
- value: "openai/gpt-4.1-nano"
- },
- {
- name: "OpenAI: GPT-4o",
- value: "openai/gpt-4o"
- },
- {
- name: "OpenAI: GPT-4o Mini",
- value: "openai/gpt-4o-mini"
- },
- {
- name: "OpenAI: GPT-4o 2024-11-20",
- value: "openai/gpt-4o-2024-11-20"
- },
- {
- name: "OpenAI: GPT-4o 2024-08-06",
- value: "openai/gpt-4o-2024-08-06"
- },
- {
- name: "OpenAI: GPT-4o 2024-05-13",
- value: "openai/gpt-4o-2024-05-13"
- },
- {
- name: "OpenAI: GPT-4o Mini 2024-07-18",
- value: "openai/gpt-4o-mini-2024-07-18"
- },
- {
- name: "OpenAI: GPT-4 Turbo",
- value: "openai/gpt-4-turbo"
- },
- {
- name: "OpenAI: GPT-4 Turbo Preview",
- value: "openai/gpt-4-turbo-preview"
- },
- {
- name: "OpenAI: GPT-4 1106 Preview",
- value: "openai/gpt-4-1106-preview"
- },
- {
- name: "OpenAI: GPT-4",
- value: "openai/gpt-4"
- },
- {
- name: "OpenAI: GPT-4 32K",
- value: "openai/gpt-4-32k"
- },
- {
- name: "OpenAI: GPT-3.5 Turbo",
- value: "openai/gpt-3.5-turbo"
- },
- {
- name: "OpenAI: GPT-3.5 Turbo 16K",
- value: "openai/gpt-3.5-turbo-16k"
- },
- {
- name: "OpenAI: GPT-3.5 Turbo 0125",
- value: "openai/gpt-3.5-turbo-0125"
- },
- {
- name: "OpenAI: GPT-3.5 Turbo 1106",
- value: "openai/gpt-3.5-turbo-1106"
- },
- {
- name: "OpenAI: o1",
- value: "openai/o1"
- },
- {
- name: "OpenAI: o1 Mini",
- value: "openai/o1-mini"
- },
- {
- name: "OpenAI: o1 Preview",
- value: "openai/o1-preview"
- },
- {
- name: "OpenAI: o3 Mini",
- value: "openai/o3-mini"
- },
- {
- name: "OpenAI: o3 Mini High",
- value: "openai/o3-mini-high"
- },
- {
- name: "OpenAI: o4 Mini",
- value: "openai/o4-mini"
- },
- {
- name: "OpenAI: o4 Mini High",
- value: "openai/o4-mini-high"
- },
- // ===== Anthropic Models =====
- {
- name: "Anthropic: Claude Sonnet 4",
- value: "anthropic/claude-sonnet-4"
- },
- {
- name: "Anthropic: Claude Opus 4",
- value: "anthropic/claude-opus-4"
- },
- {
- name: "Anthropic: Claude 3.7 Sonnet",
- value: "anthropic/claude-3.7-sonnet"
- },
- {
- name: "Anthropic: Claude 3.5 Sonnet",
- value: "anthropic/claude-3.5-sonnet"
- },
- {
- name: "Anthropic: Claude 3.5 Sonnet 2024-10-22",
- value: "anthropic/claude-3.5-sonnet-20241022"
- },
- {
- name: "Anthropic: Claude 3.5 Haiku",
- value: "anthropic/claude-3.5-haiku"
- },
- {
- name: "Anthropic: Claude 3.5 Haiku 2024-10-22",
- value: "anthropic/claude-3.5-haiku-20241022"
- },
- {
- name: "Anthropic: Claude 3 Opus",
- value: "anthropic/claude-3-opus"
- },
- {
- name: "Anthropic: Claude 3 Opus 2024-02-29",
- value: "anthropic/claude-3-opus-20240229"
- },
- {
- name: "Anthropic: Claude 3 Sonnet",
- value: "anthropic/claude-3-sonnet"
- },
- {
- name: "Anthropic: Claude 3 Haiku",
- value: "anthropic/claude-3-haiku"
- },
- {
- name: "Anthropic: Claude 3 Haiku 2024-03-07",
- value: "anthropic/claude-3-haiku-20240307"
- },
- // ===== Google Models =====
- {
- name: "Google: Gemini 3 Flash Preview",
- value: "google/gemini-3-flash-preview"
- },
- {
- name: "Google: Gemini 2.5 Pro Preview",
- value: "google/gemini-2.5-pro-preview"
- },
- {
- name: "Google: Gemini 2.5 Flash Preview",
- value: "google/gemini-2.5-flash-preview"
- },
- {
- name: "Google: Gemini 2.0 Flash",
- value: "google/gemini-2.0-flash-001"
- },
- {
- name: "Google: Gemini 2.0 Flash Lite",
- value: "google/gemini-2.0-flash-lite-001"
- },
- {
- name: "Google: Gemini 2.0 Flash Exp",
- value: "google/gemini-2.0-flash-exp"
- },
- {
- name: "Google: Gemini 2.0 Flash Thinking Exp",
- value: "google/gemini-2.0-flash-thinking-exp"
- },
- {
- name: "Google: Gemini Pro 1.5",
- value: "google/gemini-pro-1.5"
- },
- {
- name: "Google: Gemini Flash 1.5",
- value: "google/gemini-flash-1.5"
- },
- {
- name: "Google: Gemini Flash 1.5 8B",
- value: "google/gemini-flash-1.5-8b"
- },
- {
- name: "Google: Gemini Pro",
- value: "google/gemini-pro"
- },
- {
- name: "Google: Gemma 3 27B",
- value: "google/gemma-3-27b-it"
- },
- {
- name: "Google: Gemma 3 12B",
- value: "google/gemma-3-12b-it"
- },
- {
- name: "Google: Gemma 3 4B",
- value: "google/gemma-3-4b-it"
- },
- {
- name: "Google: Gemma 3 1B",
- value: "google/gemma-3-1b-it"
- },
- {
- name: "Google: Gemma 2 27B",
- value: "google/gemma-2-27b-it"
- },
- {
- name: "Google: Gemma 2 9B",
- value: "google/gemma-2-9b-it"
- },
- // ===== Meta Llama Models =====
- {
- name: "Meta: Llama 4 Maverick",
- value: "meta-llama/llama-4-maverick"
- },
- {
- name: "Meta: Llama 4 Scout",
- value: "meta-llama/llama-4-scout"
- },
- {
- name: "Meta: Llama 3.3 70B Instruct",
- value: "meta-llama/llama-3.3-70b-instruct"
- },
- {
- name: "Meta: Llama 3.2 90B Vision Instruct",
- value: "meta-llama/llama-3.2-90b-vision-instruct"
- },
- {
- name: "Meta: Llama 3.2 11B Vision Instruct",
- value: "meta-llama/llama-3.2-11b-vision-instruct"
- },
- {
- name: "Meta: Llama 3.2 3B Instruct",
- value: "meta-llama/llama-3.2-3b-instruct"
- },
- {
- name: "Meta: Llama 3.2 1B Instruct",
- value: "meta-llama/llama-3.2-1b-instruct"
- },
- {
- name: "Meta: Llama 3.1 405B Instruct",
- value: "meta-llama/llama-3.1-405b-instruct"
- },
- {
- name: "Meta: Llama 3.1 70B Instruct",
- value: "meta-llama/llama-3.1-70b-instruct"
- },
- {
- name: "Meta: Llama 3.1 8B Instruct",
- value: "meta-llama/llama-3.1-8b-instruct"
- },
- {
- name: "Meta: Llama 3 70B Instruct",
- value: "meta-llama/llama-3-70b-instruct"
- },
- {
- name: "Meta: Llama 3 8B Instruct",
- value: "meta-llama/llama-3-8b-instruct"
- },
- // ===== Mistral Models =====
- {
- name: "Mistral: Large 2411",
- value: "mistralai/mistral-large-2411"
- },
- {
- name: "Mistral: Large 2407",
- value: "mistralai/mistral-large-2407"
- },
- {
- name: "Mistral: Large",
- value: "mistralai/mistral-large"
- },
- {
- name: "Mistral: Medium",
- value: "mistralai/mistral-medium"
- },
- {
- name: "Mistral: Small",
- value: "mistralai/mistral-small"
- },
- {
- name: "Mistral: Small 2503",
- value: "mistralai/mistral-small-2503"
- },
- {
- name: "Mistral: Small 2501",
- value: "mistralai/mistral-small-2501"
- },
- {
- name: "Mistral: Small 2409",
- value: "mistralai/mistral-small-2409"
- },
- {
- name: "Mistral: Small Creative",
- value: "mistralai/mistral-small-creative"
- },
- {
- name: "Mistral: Nemo",
- value: "mistralai/mistral-nemo"
- },
- {
- name: "Mistral: Mixtral 8x22B Instruct",
- value: "mistralai/mixtral-8x22b-instruct"
- },
- {
- name: "Mistral: Mixtral 8x7B Instruct",
- value: "mistralai/mixtral-8x7b-instruct"
- },
- {
- name: "Mistral: Pixtral Large",
- value: "mistralai/pixtral-large-latest"
- },
- {
- name: "Mistral: Pixtral 12B",
- value: "mistralai/pixtral-12b"
- },
- {
- name: "Mistral: Codestral",
- value: "mistralai/codestral-latest"
- },
- {
- name: "Mistral: Ministral 3B",
- value: "mistralai/ministral-3b"
- },
- {
- name: "Mistral: Ministral 8B",
- value: "mistralai/ministral-8b"
- },
- // ===== DeepSeek Models =====
- {
- name: "DeepSeek: R1",
- value: "deepseek/deepseek-r1"
- },
- {
- name: "DeepSeek: R1 0528",
- value: "deepseek/deepseek-r1-0528"
- },
- {
- name: "DeepSeek: R1 Distill Llama 70B",
- value: "deepseek/deepseek-r1-distill-llama-70b"
- },
- {
- name: "DeepSeek: R1 Distill Qwen 32B",
- value: "deepseek/deepseek-r1-distill-qwen-32b"
- },
- {
- name: "DeepSeek: R1 Distill Qwen 14B",
- value: "deepseek/deepseek-r1-distill-qwen-14b"
- },
- {
- name: "DeepSeek: Chat V3",
- value: "deepseek/deepseek-chat"
- },
- {
- name: "DeepSeek: Chat V3 0324",
- value: "deepseek/deepseek-chat-v3-0324"
- },
- {
- name: "DeepSeek: Coder",
- value: "deepseek/deepseek-coder"
- },
- {
- name: "DeepSeek: Prover V2",
- value: "deepseek/deepseek-prover-v2"
- },
- // ===== Qwen Models =====
- {
- name: "Qwen: Qwen3 235B A22B",
- value: "qwen/qwen3-235b-a22b"
- },
- {
- name: "Qwen: Qwen3 32B",
- value: "qwen/qwen3-32b"
- },
- {
- name: "Qwen: Qwen3 30B A3B",
- value: "qwen/qwen3-30b-a3b"
- },
- {
- name: "Qwen: Qwen3 14B",
- value: "qwen/qwen3-14b"
- },
- {
- name: "Qwen: Qwen3 8B",
- value: "qwen/qwen3-8b"
- },
- {
- name: "Qwen: Qwen3 4B",
- value: "qwen/qwen3-4b"
- },
- {
- name: "Qwen: Qwen3 1.7B",
- value: "qwen/qwen3-1.7b"
- },
- {
- name: "Qwen: Qwen 2.5 72B Instruct",
- value: "qwen/qwen-2.5-72b-instruct"
- },
- {
- name: "Qwen: Qwen 2.5 32B Instruct",
- value: "qwen/qwen-2.5-32b-instruct"
- },
- {
- name: "Qwen: Qwen 2.5 14B Instruct",
- value: "qwen/qwen-2.5-14b-instruct"
- },
- {
- name: "Qwen: Qwen 2.5 7B Instruct",
- value: "qwen/qwen-2.5-7b-instruct"
- },
- {
- name: "Qwen: Qwen 2.5 Coder 32B Instruct",
- value: "qwen/qwen-2.5-coder-32b-instruct"
- },
- {
- name: "Qwen: Qwen 2.5 Coder 7B Instruct",
- value: "qwen/qwen-2.5-coder-7b-instruct"
- },
- {
- name: "Qwen: QwQ 32B Preview",
- value: "qwen/qwq-32b-preview"
- },
- {
- name: "Qwen: QwQ 32B",
- value: "qwen/qwq-32b"
- },
- {
- name: "Qwen: Qwen 2 VL 72B Instruct",
- value: "qwen/qwen-2-vl-72b-instruct"
- },
- {
- name: "Qwen: Qwen 2 VL 7B Instruct",
- value: "qwen/qwen-2-vl-7b-instruct"
- },
- // ===== Cohere Models =====
- {
- name: "Cohere: Command R+",
- value: "cohere/command-r-plus"
- },
- {
- name: "Cohere: Command R+ 08-2024",
- value: "cohere/command-r-plus-08-2024"
- },
- {
- name: "Cohere: Command R+ 04-2024",
- value: "cohere/command-r-plus-04-2024"
- },
- {
- name: "Cohere: Command R",
- value: "cohere/command-r"
- },
- {
- name: "Cohere: Command R 08-2024",
- value: "cohere/command-r-08-2024"
- },
- {
- name: "Cohere: Command R 03-2024",
- value: "cohere/command-r-03-2024"
- },
- {
- name: "Cohere: Command A",
- value: "cohere/command-a"
- },
- // ===== xAI (Grok) Models =====
- {
- name: "xAI: Grok 3",
- value: "x-ai/grok-3"
- },
- {
- name: "xAI: Grok 3 Fast",
- value: "x-ai/grok-3-fast"
- },
- {
- name: "xAI: Grok 3 Mini",
- value: "x-ai/grok-3-mini"
- },
- {
- name: "xAI: Grok 3 Mini Fast",
- value: "x-ai/grok-3-mini-fast"
- },
- {
- name: "xAI: Grok 2",
- value: "x-ai/grok-2"
- },
- {
- name: "xAI: Grok 2 1212",
- value: "x-ai/grok-2-1212"
- },
- {
- name: "xAI: Grok 2 Vision 1212",
- value: "x-ai/grok-2-vision-1212"
- },
- {
- name: "xAI: Grok Beta",
- value: "x-ai/grok-beta"
- },
- // ===== NVIDIA Models =====
- {
- name: "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
- value: "nvidia/llama-3.1-nemotron-70b-instruct"
- },
- {
- name: "NVIDIA: Llama 3.3 Nemotron Super 49B V1",
- value: "nvidia/llama-3.3-nemotron-super-49b-v1"
- },
- // ===== Microsoft Models =====
- {
- name: "Microsoft: Phi-4",
- value: "microsoft/phi-4"
- },
- {
- name: "Microsoft: Phi-4 Multimodal Instruct",
- value: "microsoft/phi-4-multimodal-instruct"
- },
- {
- name: "Microsoft: MAI DS R1",
- value: "microsoft/mai-ds-r1"
- },
- // ===== Amazon Models =====
- {
- name: "Amazon: Nova Pro 1.0",
- value: "amazon/nova-pro-v1"
- },
- {
- name: "Amazon: Nova Lite 1.0",
- value: "amazon/nova-lite-v1"
- },
- {
- name: "Amazon: Nova Micro 1.0",
- value: "amazon/nova-micro-v1"
- },
- // ===== Perplexity Models =====
- {
- name: "Perplexity: Sonar Deep Research",
- value: "perplexity/sonar-deep-research"
- },
- {
- name: "Perplexity: Sonar Pro",
- value: "perplexity/sonar-pro"
- },
- {
- name: "Perplexity: Sonar",
- value: "perplexity/sonar"
- },
- {
- name: "Perplexity: Sonar Reasoning Pro",
- value: "perplexity/sonar-reasoning-pro"
- },
- {
- name: "Perplexity: Sonar Reasoning",
- value: "perplexity/sonar-reasoning"
- },
- // ===== Nous Research Models =====
- {
- name: "Nous: Hermes 3 405B Instruct",
- value: "nousresearch/hermes-3-llama-3.1-405b"
- },
- {
- name: "Nous: Hermes 3 70B Instruct",
- value: "nousresearch/hermes-3-llama-3.1-70b"
- },
- // ===== 01.AI Models =====
- {
- name: "01.AI: Yi Large",
- value: "01-ai/yi-large"
- },
- {
- name: "01.AI: Yi Large FC",
- value: "01-ai/yi-large-fc"
- },
- {
- name: "01.AI: Yi Large Turbo",
- value: "01-ai/yi-large-turbo"
- },
- // ===== Inflection Models =====
- {
- name: "Inflection: Inflection 3 Pi",
- value: "inflection/inflection-3-pi"
- },
- {
- name: "Inflection: Inflection 3 Productivity",
- value: "inflection/inflection-3-productivity"
- },
- // ===== AI21 Models =====
- {
- name: "AI21: Jamba 1.5 Large",
- value: "ai21/jamba-1.5-large"
- },
- {
- name: "AI21: Jamba 1.5 Mini",
- value: "ai21/jamba-1.5-mini"
- },
- // ===== Databricks Models =====
- {
- name: "Databricks: DBRX Instruct",
- value: "databricks/dbrx-instruct"
- },
- // ===== Fireworks Models =====
- {
- name: "Fireworks: Firellama 405B Instruct",
- value: "fireworks/firellama-405b-instruct"
- },
- // ===== Groq Models =====
- {
- name: "Groq: Llama 3.3 70B Versatile",
- value: "groq/llama-3.3-70b-versatile"
- },
- {
- name: "Groq: Llama 3.1 8B Instant",
- value: "groq/llama-3.1-8b-instant"
- },
- // ===== Cognitive Computations Models =====
- {
- name: "Cognitive Computations: Dolphin 3.0 R1 Mistral 24B",
- value: "cognitivecomputations/dolphin-3.0-r1-mistral-24b"
- },
- {
- name: "Cognitive Computations: Dolphin 3.0 Mistral 24B",
- value: "cognitivecomputations/dolphin-3.0-mistral-24b"
- },
- // ===== FREE MODELS (with tool support) =====
- {
- name: "[FREE] NVIDIA: Nemotron 3 Nano 30B",
- value: "nvidia/nemotron-3-nano-30b-a3b:free"
- },
- {
- name: "[FREE] Xiaomi: MiMo V2 Flash",
- value: "xiaomi/mimo-v2-flash:free"
- },
- {
- name: "[FREE] Meta: Llama 3.1 8B Instruct",
- value: "meta-llama/llama-3.1-8b-instruct:free"
- },
- {
- name: "[FREE] Meta: Llama 3.2 3B Instruct",
- value: "meta-llama/llama-3.2-3b-instruct:free"
- },
- {
- name: "[FREE] Qwen: Qwen3 8B",
- value: "qwen/qwen3-8b:free"
- },
- {
- name: "[FREE] Qwen: Qwen3 4B",
- value: "qwen/qwen3-4b:free"
- },
- {
- name: "[FREE] Qwen: Qwen 2.5 7B Instruct",
- value: "qwen/qwen-2.5-7b-instruct:free"
- },
- {
- name: "[FREE] Qwen: Qwen 2.5 Coder 7B Instruct",
- value: "qwen/qwen-2.5-coder-7b-instruct:free"
- },
- {
- name: "[FREE] Google: Gemma 2 9B",
- value: "google/gemma-2-9b-it:free"
- },
- {
- name: "[FREE] Mistral: Mistral Small 3.1 24B",
- value: "mistralai/mistral-small-3.1-24b-instruct:free"
- }
- ],
  default: "openai/gpt-4o-mini",
- description: "Select a model or type a custom OpenRouter model ID. See https://openrouter.ai/models for all available models."
+ description: "Select a model or type a custom model ID. Models are loaded dynamically from AgnicPay API."
  },
  {
  displayName: "Options",
@@ -948,6 +256,39 @@ var AgnicAILanguageModel = class {
  }
  ]
  };
+ /**
+ * Methods for dynamic option loading
+ */
+ this.methods = {
+ loadOptions: {
+ async getModels() {
+ const fallbackModels = [
+ { name: "\u2B50 OpenAI: GPT-4o Mini (Recommended)", value: "openai/gpt-4o-mini" },
+ { name: "\u2B50 Anthropic: Claude 3.5 Sonnet", value: "anthropic/claude-3.5-sonnet" },
+ { name: "\u2B50 Google: Gemini 2.0 Flash", value: "google/gemini-2.0-flash-001" },
+ { name: "\u2B50 Meta: Llama 3.3 70B", value: "meta-llama/llama-3.3-70b-instruct" },
+ { name: "\u2B50 DeepSeek: Chat V3 (Affordable)", value: "deepseek/deepseek-chat" },
+ { name: "[FREE] Meta: Llama 3.1 8B", value: "meta-llama/llama-3.1-8b-instruct:free" }
+ ];
+ try {
+ const response = await this.helpers.httpRequest({
+ method: "GET",
+ url: "https://api.agnicpay.xyz/v1/models",
+ timeout: 1e4
+ });
+ if (!response?.data || !Array.isArray(response.data) || response.data.length === 0) {
+ return fallbackModels;
+ }
+ return response.data.map((model) => ({
+ name: model.name || model.id,
+ value: model.id
+ }));
+ } catch {
+ return fallbackModels;
+ }
+ }
+ }
+ };
  }
  static {
  __name(this, "AgnicAILanguageModel");
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "n8n-nodes-agnicwallet",
- "version": "1.0.13",
+ "version": "1.0.14",
  "description": "n8n community node for AgnicWallet - automated Web3 payments for X402 APIs",
  "keywords": [
  "n8n-community-node-package",
@@ -37,6 +37,8 @@
  "scripts": {
  "build": "node esbuild.config.js && gulp build:icons",
  "dev": "tsc --watch",
+ "lint": "eslint nodes credentials --ext .ts",
+ "lint:fix": "eslint nodes credentials --ext .ts --fix",
  "format": "prettier nodes credentials --write",
  "format:check": "prettier nodes credentials --check",
  "prepublishOnly": "npm run build"
@@ -1,22 +0,0 @@
- import { INodeType, INodeTypeDescription, ISupplyDataFunctions, IExecuteFunctions, SupplyData, INodeExecutionData } from "n8n-workflow";
- /**
- * AgnicMCPTool - MCP Client for AgnicPay
- *
- * This is a supply-only AI tool node that connects to the AgnicPay MCP server
- * and provides X402 payment tools to AI Agents via the MCP protocol.
- *
- * This node cannot be executed directly - it only supplies tools to AI Agents.
- */
- export declare class AgnicMCPTool implements INodeType {
- description: INodeTypeDescription;
- /**
- * Execute method for direct tool invocation.
- * This is called when input data is passed directly to this node.
- */
- execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
- /**
- * Supply MCP tools to AI Agent.
- * This is the main method that provides tools to the AI Agent.
- */
- supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData>;
- }