n8n-nodes-github-copilot 3.38.25 → 3.38.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/credentials/GitHubCopilotApi.credentials.d.ts +1 -1
  2. package/dist/credentials/GitHubCopilotApi.credentials.js +25 -25
  3. package/dist/nodes/GitHubCopilot/GitHubCopilot.node.d.ts +1 -1
  4. package/dist/nodes/GitHubCopilot/GitHubCopilot.node.js +166 -166
  5. package/dist/nodes/GitHubCopilotAuthHelper/GitHubCopilotAuthHelper.node.d.ts +1 -1
  6. package/dist/nodes/GitHubCopilotAuthHelper/GitHubCopilotAuthHelper.node.js +539 -539
  7. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.d.ts +1 -1
  8. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.js +46 -44
  9. package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.d.ts +1 -1
  10. package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.js +82 -82
  11. package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.d.ts +2 -2
  12. package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.js +26 -26
  13. package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.d.ts +2 -2
  14. package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.js +12 -12
  15. package/dist/nodes/GitHubCopilotChatAPI/utils/index.d.ts +4 -4
  16. package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.d.ts +3 -3
  17. package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.js +19 -19
  18. package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.d.ts +1 -1
  19. package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.js +23 -23
  20. package/dist/nodes/GitHubCopilotChatAPI/utils/types.d.ts +5 -5
  21. package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.d.ts +1 -1
  22. package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.js +115 -106
  23. package/dist/nodes/GitHubCopilotEmbeddings/GitHubCopilotEmbeddings.node.d.ts +1 -1
  24. package/dist/nodes/GitHubCopilotEmbeddings/GitHubCopilotEmbeddings.node.js +114 -114
  25. package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.d.ts +1 -1
  26. package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.js +74 -69
  27. package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.d.ts +1 -1
  28. package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.js +181 -181
  29. package/dist/nodes/GitHubCopilotOpenAI/utils/index.d.ts +2 -2
  30. package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.d.ts +10 -10
  31. package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.js +53 -53
  32. package/dist/nodes/GitHubCopilotOpenAI/utils/types.d.ts +12 -12
  33. package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.d.ts +1 -1
  34. package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.js +120 -116
  35. package/dist/package.json +1 -1
  36. package/package.json +1 -1
@@ -14,7 +14,7 @@ async function processMediaFile(context, itemIndex, source, mediaFile, mediaUrl,
  throw new Error(suggestImageConversion(imageResult.mimeType));
  }
  return {
- type: "image",
+ type: 'image',
  dataUrl: `data:${imageResult.mimeType};base64,${imageResult.data}`,
  description: `Image file: ${imageResult.filename} (${Math.round(imageResult.size / 1024)}KB, ${imageResult.mimeType})`,
  mimeType: imageResult.mimeType,
@@ -22,43 +22,43 @@ async function processMediaFile(context, itemIndex, source, mediaFile, mediaUrl,
  }
  catch (error) {
  return {
- type: "unknown",
- description: `Error processing image file: ${error instanceof Error ? error.message : "Unknown error"}`,
- mimeType: "unknown",
+ type: 'unknown',
+ description: `Error processing image file: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ mimeType: 'unknown',
  };
  }
  }
  function isImageMimeType(mimeType) {
- const supportedFormats = ["image/png", "image/jpeg", "image/jpg", "image/gif", "image/webp"];
+ const supportedFormats = ['image/png', 'image/jpeg', 'image/jpg', 'image/gif', 'image/webp'];
  return supportedFormats.includes(mimeType.toLowerCase());
  }
  function validateImageFormat(mimeType) {
  if (!isImageMimeType(mimeType)) {
- const supportedFormats = ["PNG", "JPEG", "GIF", "WebP"];
+ const supportedFormats = ['PNG', 'JPEG', 'GIF', 'WebP'];
  return {
  isValid: false,
- error: `Unsupported image format: ${mimeType}. GitHub Copilot API only supports: ${supportedFormats.join(", ")}`,
+ error: `Unsupported image format: ${mimeType}. GitHub Copilot API only supports: ${supportedFormats.join(', ')}`,
  };
  }
  return { isValid: true };
  }
  function getFileExtensionFromMimeType(mimeType) {
  const mimeToExt = {
- "image/png": "png",
- "image/jpeg": "jpg",
- "image/jpg": "jpg",
- "image/gif": "gif",
- "image/webp": "webp",
- "image/bmp": "bmp",
- "image/tiff": "tiff",
- "image/svg+xml": "svg",
+ 'image/png': 'png',
+ 'image/jpeg': 'jpg',
+ 'image/jpg': 'jpg',
+ 'image/gif': 'gif',
+ 'image/webp': 'webp',
+ 'image/bmp': 'bmp',
+ 'image/tiff': 'tiff',
+ 'image/svg+xml': 'svg',
  };
- return mimeToExt[mimeType.toLowerCase()] || "unknown";
+ return mimeToExt[mimeType.toLowerCase()] || 'unknown';
  }
  function suggestImageConversion(mimeType) {
  const ext = getFileExtensionFromMimeType(mimeType);
- const supportedFormats = ["PNG", "JPEG", "GIF", "WebP"];
+ const supportedFormats = ['PNG', 'JPEG', 'GIF', 'WebP'];
  return (`Image format ${ext.toUpperCase()} is not supported by GitHub Copilot API. ` +
- `Please convert your image to one of these formats: ${supportedFormats.join(", ")}. ` +
- "Recommended: Convert to PNG or WebP for best compatibility.");
+ `Please convert your image to one of these formats: ${supportedFormats.join(', ')}. ` +
+ 'Recommended: Convert to PNG or WebP for best compatibility.');
  }
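
Note: the hunk above only switches string literals from double to single quotes; the helpers behave the same in both versions. As a quick orientation, a minimal sketch of how the validation path visible in this hunk fits together (the call site itself is hypothetical, only the helper names and messages come from the diff):

    // Hypothetical call site for the helpers shown above (sketch, not package code).
    const check = validateImageFormat('image/bmp');
    if (!check.isValid) {
        // "Unsupported image format: image/bmp. GitHub Copilot API only supports: PNG, JPEG, GIF, WebP"
        console.warn(check.error);
        // suggestImageConversion('image/bmp') then builds the longer hint ending in
        // "Recommended: Convert to PNG or WebP for best compatibility."
        throw new Error(suggestImageConversion('image/bmp'));
    }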
@@ -1,4 +1,4 @@
- import { ModelCapabilities, ModelValidationResult } from "./types";
+ import { ModelCapabilities, ModelValidationResult } from './types';
  export declare const MODEL_CAPABILITIES: Record<string, ModelCapabilities>;
  export declare function validateModelCapabilities(model: string, includeImage: boolean, includeAudio: boolean): ModelValidationResult;
  export declare function getSupportedModels(requireImages?: boolean, requireAudio?: boolean): string[];
@@ -5,65 +5,65 @@ exports.validateModelCapabilities = validateModelCapabilities;
  exports.getSupportedModels = getSupportedModels;
  exports.getModelInfo = getModelInfo;
  exports.MODEL_CAPABILITIES = {
- "gpt-5": {
+ 'gpt-5': {
  supportsImages: true,
  supportsAudio: false,
  maxContextTokens: 200000,
- description: "OpenAI GPT-5 with image support via GitHub Copilot API",
+ description: 'OpenAI GPT-5 with image support via GitHub Copilot API',
  },
- "gpt-5-mini": {
+ 'gpt-5-mini': {
  supportsImages: true,
  supportsAudio: false,
  maxContextTokens: 128000,
- description: "OpenAI GPT-5 Mini with image support via GitHub Copilot API",
+ description: 'OpenAI GPT-5 Mini with image support via GitHub Copilot API',
  },
- "gpt-4.1-copilot": {
+ 'gpt-4.1-copilot': {
  supportsImages: true,
  supportsAudio: false,
  maxContextTokens: 128000,
- description: "OpenAI GPT-4.1 with image support via GitHub Copilot API",
+ description: 'OpenAI GPT-4.1 with image support via GitHub Copilot API',
  },
- "claude-opus-4.1": {
+ 'claude-opus-4.1': {
  supportsImages: false,
  supportsAudio: false,
  maxContextTokens: 200000,
- description: "Anthropic Claude Opus 4.1 - Text only via GitHub Copilot API",
+ description: 'Anthropic Claude Opus 4.1 - Text only via GitHub Copilot API',
  },
- "claude-3.5-sonnet": {
+ 'claude-3.5-sonnet': {
  supportsImages: false,
  supportsAudio: false,
  maxContextTokens: 200000,
- description: "Anthropic Claude 3.5 Sonnet - Text only via GitHub Copilot API",
+ description: 'Anthropic Claude 3.5 Sonnet - Text only via GitHub Copilot API',
  },
- "gemini-2.5-pro": {
+ 'gemini-2.5-pro': {
  supportsImages: true,
  supportsAudio: false,
  maxContextTokens: 1000000,
- description: "Google Gemini 2.5 Pro with image support via GitHub Copilot API",
+ description: 'Google Gemini 2.5 Pro with image support via GitHub Copilot API',
  },
- "gemini-2.0-flash": {
+ 'gemini-2.0-flash': {
  supportsImages: true,
  supportsAudio: true,
  maxContextTokens: 1000000,
- description: "Google Gemini 2.0 Flash with multimodal support via GitHub Copilot API",
+ description: 'Google Gemini 2.0 Flash with multimodal support via GitHub Copilot API',
  },
- "grok-code-fast-1": {
+ 'grok-code-fast-1': {
  supportsImages: false,
  supportsAudio: false,
  maxContextTokens: 128000,
- description: "xAI Grok Code Fast 1 - Text only via GitHub Copilot API",
+ description: 'xAI Grok Code Fast 1 - Text only via GitHub Copilot API',
  },
  o3: {
  supportsImages: false,
  supportsAudio: false,
  maxContextTokens: 200000,
- description: "OpenAI o3 - Text only via GitHub Copilot API",
+ description: 'OpenAI o3 - Text only via GitHub Copilot API',
  },
- "o3-mini": {
+ 'o3-mini': {
  supportsImages: false,
  supportsAudio: false,
  maxContextTokens: 128000,
- description: "OpenAI o3-mini - Text only via GitHub Copilot API",
+ description: 'OpenAI o3-mini - Text only via GitHub Copilot API',
  },
  };
  function validateModelCapabilities(model, includeImage, includeAudio) {
@@ -85,11 +85,11 @@ function validateModelCapabilities(model, includeImage, includeAudio) {
  isValid = false;
  errorMessage = `Model ${model} does not support audio input. Please disable audio upload or choose a different model (e.g., GPT-5, Gemini 2.5 Pro).`;
  }
- if (model.includes("claude") && (includeImage || includeAudio)) {
- warnings.push("Claude models typically work best with text-only input via GitHub Copilot API.");
+ if (model.includes('claude') && (includeImage || includeAudio)) {
+ warnings.push('Claude models typically work best with text-only input via GitHub Copilot API.');
  }
- if (model.includes("grok") && (includeImage || includeAudio)) {
- warnings.push("Grok models are optimized for coding tasks and work best with text input.");
+ if (model.includes('grok') && (includeImage || includeAudio)) {
+ warnings.push('Grok models are optimized for coding tasks and work best with text input.');
  }
  return {
  isValid,
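
These hunks are also quoting-only; the MODEL_CAPABILITIES table and the validation rules are untouched. Based on the declarations in the .d.ts hunk above, usage looks roughly like this (a sketch; result fields beyond isValid and warnings are assumed from the snippets visible here):

    import { validateModelCapabilities, getSupportedModels } from './modelCapabilities';

    // claude-3.5-sonnet is declared with supportsImages: false, so requesting image input
    // should come back invalid, and the Claude-specific text-only warning is pushed as well.
    const result = validateModelCapabilities('claude-3.5-sonnet', true, false);
    console.log(result.isValid, result.warnings);

    // Image-capable entries in MODEL_CAPABILITIES: gpt-5, gpt-5-mini, gpt-4.1-copilot,
    // gemini-2.5-pro and gemini-2.0-flash; only gemini-2.0-flash also lists audio support.
    console.log(getSupportedModels(true));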
@@ -1,10 +1,10 @@
- import { IExecuteFunctions } from "n8n-workflow";
- import { CopilotResponse } from "../../../shared/utils/GitHubCopilotApiUtils";
+ import { IExecuteFunctions } from 'n8n-workflow';
+ import { CopilotResponse } from '../../../shared/utils/GitHubCopilotApiUtils';
  export { CopilotResponse };
  export interface ChatMessage {
- role: "system" | "user" | "assistant";
+ role: 'system' | 'user' | 'assistant';
  content: string | Array<ChatMessageContent>;
- type?: "file";
+ type?: 'file';
  }
  export interface ChatMessageContent {
  type: string;
@@ -16,7 +16,7 @@ export interface ChatMessageContent {
  export interface FileProcessOptions {
  context: IExecuteFunctions;
  itemIndex: number;
- source: "manual" | "url" | "binary";
+ source: 'manual' | 'url' | 'binary';
  filePath?: string;
  url?: string;
  binaryProperty?: string;
@@ -1,4 +1,4 @@
- import { INodeType, INodeTypeDescription, ISupplyDataFunctions, SupplyData, ILoadOptionsFunctions, INodePropertyOptions } from "n8n-workflow";
+ import { INodeType, INodeTypeDescription, ISupplyDataFunctions, SupplyData, ILoadOptionsFunctions, INodePropertyOptions } from 'n8n-workflow';
  export declare class GitHubCopilotChatModel implements INodeType {
  description: INodeTypeDescription;
  methods: {
@@ -14,12 +14,17 @@ class GitHubCopilotChatOpenAI extends openai_1.ChatOpenAI {
  this.context = context;
  this.options = options;
  }
+ invocationParams(options) {
+ const params = super.invocationParams(options);
+ params.model = this.model;
+ return params;
+ }
  async _generate(messages, options) {
  var _a;
  if (!messages || messages.length === 0) {
- throw new Error("No messages provided for generation");
+ throw new Error('No messages provided for generation');
  }
- let copilotMessages = messages.map(msg => {
+ let copilotMessages = messages.map((msg) => {
  let role;
  switch (msg._getType()) {
  case 'human':
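
The added invocationParams() override is the first functional change in this release: it re-applies this.model to the parameters LangChain assembles for each call, so the Copilot model selected in the node is what lands in the outgoing params rather than a base-class default. A hedged reading of the effect (the override itself is from the diff; the comments are added):

    invocationParams(options) {
        const params = super.invocationParams(options); // ChatOpenAI's default params
        params.model = this.model;                      // pin the node-selected Copilot model
        return params;
    }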
@@ -55,7 +60,7 @@ class GitHubCopilotChatOpenAI extends openai_1.ChatOpenAI {
  };
  });
  if (this.options.systemMessage && this.options.systemMessage.trim()) {
- const hasSystemMessage = copilotMessages.some(msg => msg.role === 'system');
+ const hasSystemMessage = copilotMessages.some((msg) => msg.role === 'system');
  if (!hasSystemMessage) {
  copilotMessages.unshift({
  role: 'system',
@@ -64,7 +69,7 @@ class GitHubCopilotChatOpenAI extends openai_1.ChatOpenAI {
  console.log(`🔧 Added system message from options`);
  }
  }
- const validMessages = copilotMessages.filter(msg => {
+ const validMessages = copilotMessages.filter((msg) => {
  if (!msg.content || msg.content.trim() === '') {
  console.warn(`⚠️ Filtering out empty message with role: ${msg.role}`);
  return false;
@@ -72,7 +77,7 @@ class GitHubCopilotChatOpenAI extends openai_1.ChatOpenAI {
  return true;
  });
  if (validMessages.length === 0) {
- throw new Error("No valid messages after filtering empty content");
+ throw new Error('No valid messages after filtering empty content');
  }
  const requestBody = {
  model: this.modelName || this.model,
@@ -84,7 +89,7 @@ class GitHubCopilotChatOpenAI extends openai_1.ChatOpenAI {
  };
  if (this.options.tools && this.options.tools.length > 0) {
  requestBody.tools = this.options.tools;
- requestBody.tool_choice = this.options.tool_choice || "auto";
+ requestBody.tool_choice = this.options.tool_choice || 'auto';
  console.log(`🔧 Request includes ${this.options.tools.length} tools`);
  }
  const startTime = Date.now();
@@ -94,27 +99,31 @@ class GitHubCopilotChatOpenAI extends openai_1.ChatOpenAI {
  const latency = endTime - startTime;
  console.log(`⏱️ GitHub Copilot API call completed in ${latency}ms`);
  if (!response.choices || response.choices.length === 0) {
- throw new Error("GitHub Copilot API returned no choices in response");
+ throw new Error('GitHub Copilot API returned no choices in response');
  }
  const choice = response.choices[0];
  if (!choice.message) {
- throw new Error("GitHub Copilot API returned choice without message");
+ throw new Error('GitHub Copilot API returned choice without message');
  }
  const langchainMessage = {
  _getType: () => choice.message.role,
- content: choice.message.content || "",
+ content: choice.message.content || '',
  tool_calls: choice.message.tool_calls,
  };
  console.log(`📝 Response: role=${choice.message.role}, content_length=${((_a = choice.message.content) === null || _a === void 0 ? void 0 : _a.length) || 0}, finish_reason=${choice.finish_reason}`);
  return {
- generations: [[{
- text: choice.message.content || "",
- generationInfo: {
- finish_reason: choice.finish_reason,
- },
- message: langchainMessage,
- }]],
- tokenUsage: response.usage,
+ generations: [
+ {
+ text: choice.message.content || '',
+ generationInfo: {
+ finish_reason: choice.finish_reason,
+ },
+ message: langchainMessage,
+ },
+ ],
+ llmOutput: {
+ tokenUsage: response.usage,
+ },
  };
  }
  catch (error) {
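
The second functional change sits in this hunk: _generate() now returns a flat generations array instead of the doubly nested [[ ... ]] form, and token usage moves from a top-level tokenUsage field into llmOutput.tokenUsage. The resulting shape, with field names taken from the + lines above (the contents of response.usage are whatever the Copilot endpoint returns):

    // Sketch of the object _generate() now resolves with.
    {
        generations: [
            {
                text: choice.message.content || '',
                generationInfo: { finish_reason: choice.finish_reason },
                message: langchainMessage, // role, content and tool_calls from the API choice
            },
        ],
        llmOutput: {
            tokenUsage: response.usage, // passed through from the Copilot API response
        },
    }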
@@ -128,146 +137,146 @@ class GitHubCopilotChatOpenAI extends openai_1.ChatOpenAI {
  class GitHubCopilotChatModel {
  constructor() {
  this.description = {
- displayName: "GitHub Copilot Chat Model",
- name: "gitHubCopilotChatModel",
- icon: "file:../../shared/icons/copilot.svg",
- group: ["transform"],
+ displayName: 'GitHub Copilot Chat Model',
+ name: 'gitHubCopilotChatModel',
+ icon: 'file:../../shared/icons/copilot.svg',
+ group: ['transform'],
  version: 1,
- description: "GitHub Copilot chat model for AI workflows with full support for tools and function calling - access GPT-5, Claude, Gemini and more using your Copilot subscription",
+ description: 'GitHub Copilot chat model for AI workflows with full support for tools and function calling - access GPT-5, Claude, Gemini and more using your Copilot subscription',
  defaults: {
- name: "GitHub Copilot Chat Model",
+ name: 'GitHub Copilot Chat Model',
  },
  codex: {
- categories: ["AI"],
+ categories: ['AI'],
  subcategories: {
- AI: ["Language Models", "Root Nodes"],
- "Language Models": ["Chat Models (Recommended)"],
+ AI: ['Language Models', 'Root Nodes'],
+ 'Language Models': ['Chat Models (Recommended)'],
  },
  resources: {
  primaryDocumentation: [
  {
- url: "https://docs.github.com/copilot/using-github-copilot/using-github-copilot-chat",
+ url: 'https://docs.github.com/copilot/using-github-copilot/using-github-copilot-chat',
  },
  ],
  },
  },
  inputs: [],
- outputs: ["ai_languageModel"],
- outputNames: ["Model"],
+ outputs: ['ai_languageModel'],
+ outputNames: ['Model'],
  credentials: [
  {
- name: "githubCopilotApi",
+ name: 'githubCopilotApi',
  required: true,
  },
  ],
  properties: [
  ...ModelProperties_1.CHAT_MODEL_PROPERTIES,
  {
- displayName: "Options",
- name: "options",
- placeholder: "Add Option",
- description: "Additional options for the GitHub Copilot model",
- type: "collection",
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ description: 'Additional options for the GitHub Copilot model',
+ type: 'collection',
  default: {},
  options: [
  {
- displayName: "Temperature",
- name: "temperature",
+ displayName: 'Temperature',
+ name: 'temperature',
  default: 0.7,
  typeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 },
- description: "Controls randomness in output. Lower values make responses more focused.",
- type: "number",
+ description: 'Controls randomness in output. Lower values make responses more focused.',
+ type: 'number',
  },
  {
- displayName: "Maximum Number of Tokens",
- name: "maxTokens",
+ displayName: 'Maximum Number of Tokens',
+ name: 'maxTokens',
  default: 1000,
- description: "The maximum number of tokens to generate",
- type: "number",
+ description: 'The maximum number of tokens to generate',
+ type: 'number',
  typeOptions: {
  maxValue: 32768,
  },
  },
  {
- displayName: "Top P",
- name: "topP",
+ displayName: 'Top P',
+ name: 'topP',
  default: 1,
  typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 2 },
- description: "Controls diversity of output by nucleus sampling",
- type: "number",
+ description: 'Controls diversity of output by nucleus sampling',
+ type: 'number',
  },
  {
- displayName: "Enable Streaming",
- name: "enableStreaming",
- type: "boolean",
+ displayName: 'Enable Streaming',
+ name: 'enableStreaming',
+ type: 'boolean',
  default: false,
- description: "Enable streaming responses for real-time output (experimental)",
+ description: 'Enable streaming responses for real-time output (experimental)',
  },
  {
- displayName: "System Message",
- name: "systemMessage",
- type: "string",
- default: "",
- description: "System message to set the behavior of the assistant",
+ displayName: 'System Message',
+ name: 'systemMessage',
+ type: 'string',
+ default: '',
+ description: 'System message to set the behavior of the assistant',
  typeOptions: {
  rows: 3,
  },
  },
  {
- displayName: "Auto Retry on 403 Error",
- name: "enableRetry",
- type: "boolean",
+ displayName: 'Auto Retry on 403 Error',
+ name: 'enableRetry',
+ type: 'boolean',
  default: true,
- description: "Automatically retry requests when hitting TPM (Transactions Per Minute) quota limits (HTTP 403)",
+ description: 'Automatically retry requests when hitting TPM (Transactions Per Minute) quota limits (HTTP 403)',
  },
  {
- displayName: "Request Timeout (seconds)",
- name: "timeout",
- type: "number",
+ displayName: 'Request Timeout (seconds)',
+ name: 'timeout',
+ type: 'number',
  default: 120,
- description: "Maximum time to wait for API response (in seconds)",
+ description: 'Maximum time to wait for API response (in seconds)',
  typeOptions: {
  minValue: 10,
  maxValue: 300,
  },
  },
  {
- displayName: "Tools (Function Calling)",
- name: "tools",
- type: "string",
- default: "",
- description: "Optional: Array of tools/functions available to the model (OpenAI format). Leave empty if not using function calling.",
+ displayName: 'Tools (Function Calling)',
+ name: 'tools',
+ type: 'string',
+ default: '',
+ description: 'Optional: Array of tools/functions available to the model (OpenAI format). Leave empty if not using function calling.',
  hint: "JSON array of tool definitions in OpenAI format. Leave this field empty if you don't need function calling.",
  typeOptions: {
  rows: 6,
  },
  },
  {
- displayName: "Tool Choice",
- name: "tool_choice",
- type: "options",
+ displayName: 'Tool Choice',
+ name: 'tool_choice',
+ type: 'options',
  options: [
  {
- name: "Auto",
- value: "auto",
- description: "Let the model decide when to use tools",
+ name: 'Auto',
+ value: 'auto',
+ description: 'Let the model decide when to use tools',
  },
  {
- name: "Required",
- value: "required",
- description: "Force the model to use at least one tool",
+ name: 'Required',
+ value: 'required',
+ description: 'Force the model to use at least one tool',
  },
  {
- name: "None",
- value: "none",
- description: "Disable tool usage",
+ name: 'None',
+ value: 'none',
+ description: 'Disable tool usage',
  },
  ],
- default: "auto",
- description: "Control how the model uses tools",
+ default: 'auto',
+ description: 'Control how the model uses tools',
  displayOptions: {
  show: {
- tools: ["/.+/"],
+ tools: ['/.+/'],
  },
  },
  },
@@ -284,10 +293,10 @@ class GitHubCopilotChatModel {
  };
  }
  async supplyData(itemIndex) {
- let model = this.getNodeParameter("model", itemIndex);
- if (model === "__manual__") {
- const customModel = this.getNodeParameter("customModel", itemIndex);
- if (!customModel || customModel.trim() === "") {
+ let model = this.getNodeParameter('model', itemIndex);
+ if (model === '__manual__') {
+ const customModel = this.getNodeParameter('customModel', itemIndex);
+ if (!customModel || customModel.trim() === '') {
  throw new Error("Custom model name is required when selecting '✏️ Enter Custom Model Name'");
  }
  model = customModel;
@@ -296,21 +305,21 @@ class GitHubCopilotChatModel {
  else {
  console.log(`✅ Using model from list: ${model}`);
  }
- const options = this.getNodeParameter("options", itemIndex, {});
+ const options = this.getNodeParameter('options', itemIndex, {});
  const modelInfo = GitHubCopilotModels_1.GitHubCopilotModelsManager.getModelByValue(model);
- const credentials = (await this.getCredentials("githubCopilotApi"));
+ const credentials = (await this.getCredentials('githubCopilotApi'));
  const token = credentials.token;
  if (!token) {
- console.error("❌ Available credential properties:", Object.keys(credentials));
- throw new Error("GitHub Copilot: No token found in credentials. Available properties: " +
- Object.keys(credentials).join(", "));
+ console.error('❌ Available credential properties:', Object.keys(credentials));
+ throw new Error('GitHub Copilot: No token found in credentials. Available properties: ' +
+ Object.keys(credentials).join(', '));
  }
- const tokenPrefix = token.substring(0, Math.min(4, token.indexOf("_") + 1)) || token.substring(0, 4);
+ const tokenPrefix = token.substring(0, Math.min(4, token.indexOf('_') + 1)) || token.substring(0, 4);
  const tokenSuffix = token.substring(Math.max(0, token.length - 5));
  console.log(`🔍 GitHub Copilot ChatModel OAuth2 Debug: Using token ${tokenPrefix}...${tokenSuffix}`);
- if (!token.startsWith("gho_") &&
- !token.startsWith("ghu_") &&
- !token.startsWith("github_pat_")) {
+ if (!token.startsWith('gho_') &&
+ !token.startsWith('ghu_') &&
+ !token.startsWith('github_pat_')) {
  console.warn(`⚠️ Unexpected token format: ${tokenPrefix}...${tokenSuffix}. Trying API call anyway.`);
  }
  const safeModel = modelInfo ? model : GitHubCopilotModels_1.DEFAULT_MODELS.GENERAL;
@@ -331,7 +340,7 @@ class GitHubCopilotChatModel {
  }
  }
  catch (error) {
- console.log(`⚠️ Failed to parse tools JSON: ${error instanceof Error ? error.message : "Unknown error"}`);
+ console.log(`⚠️ Failed to parse tools JSON: ${error instanceof Error ? error.message : 'Unknown error'}`);
  }
  }
  const modelConfig = {
@@ -342,22 +351,22 @@ class GitHubCopilotChatModel {
  maxRetries: options.enableRetry !== false ? options.maxRetries || 3 : 0,
  ...(parsedTools.length > 0 && {
  tools: parsedTools,
- tool_choice: options.tool_choice || "auto",
+ tool_choice: options.tool_choice || 'auto',
  }),
  configuration: {
  baseURL: GitHubCopilotEndpoints_1.GITHUB_COPILOT_API.BASE_URL,
  apiKey: token,
  defaultHeaders: {
- "User-Agent": "GitHubCopilotChat/1.0.0 n8n/3.10.1",
- Accept: "application/json",
- "Editor-Version": `vscode/${minVSCodeVersion}`,
- "Editor-Plugin-Version": "copilot-chat/0.12.0",
- "X-Request-Id": `n8n-chatmodel-${Date.now()}-${Math.random().toString(36).substring(7)}`,
+ 'User-Agent': 'GitHubCopilotChat/1.0.0 n8n/3.10.1',
+ Accept: 'application/json',
+ 'Editor-Version': `vscode/${minVSCodeVersion}`,
+ 'Editor-Plugin-Version': 'copilot-chat/0.12.0',
+ 'X-Request-Id': `n8n-chatmodel-${Date.now()}-${Math.random().toString(36).substring(7)}`,
  ...additionalHeaders,
  ...(options.enableVision &&
  (safeModelInfo === null || safeModelInfo === void 0 ? void 0 : safeModelInfo.capabilities.vision) && {
- "Copilot-Vision-Request": "true",
- "Copilot-Media-Request": "true",
+ 'Copilot-Vision-Request': 'true',
+ 'Copilot-Media-Request': 'true',
  }),
  },
  },
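
For reference, with vision enabled on a vision-capable model the defaultHeaders assembled above resolve to roughly the following (literal values are from the diff; the Editor-Version and X-Request-Id parts are computed at runtime, and additionalHeaders is merged in from elsewhere in the node):

    {
        'User-Agent': 'GitHubCopilotChat/1.0.0 n8n/3.10.1',
        Accept: 'application/json',
        'Editor-Version': 'vscode/<minVSCodeVersion>',
        'Editor-Plugin-Version': 'copilot-chat/0.12.0',
        'X-Request-Id': 'n8n-chatmodel-<timestamp>-<random>',
        // only when options.enableVision is set and the model's capabilities include vision:
        'Copilot-Vision-Request': 'true',
        'Copilot-Media-Request': 'true',
    }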
@@ -1,4 +1,4 @@
- import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription, ILoadOptionsFunctions, INodePropertyOptions } from "n8n-workflow";
+ import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription, ILoadOptionsFunctions, INodePropertyOptions } from 'n8n-workflow';
  export declare class GitHubCopilotEmbeddings implements INodeType {
  description: INodeTypeDescription;
  methods: {