n8n-nodes-github-copilot 4.4.17 → 4.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2 @@
1
+ import { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
2
+ export declare function executeChatCompletion(context: IExecuteFunctions, items: INodeExecutionData[], i: number): Promise<Record<string, any>>;
@@ -0,0 +1,243 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.executeChatCompletion = executeChatCompletion;
4
+ const n8n_workflow_1 = require("n8n-workflow");
5
+ const utils_1 = require("../../GitHubCopilotChatAPI/utils");
6
+ const GitHubCopilotEndpoints_1 = require("../../../shared/utils/GitHubCopilotEndpoints");
7
+ const GitHubCopilotModels_1 = require("../../../shared/models/GitHubCopilotModels");
8
+ const DynamicModelsManager_1 = require("../../../shared/utils/DynamicModelsManager");
9
+ const parseMessages_1 = require("./parseMessages");
10
// Maps OpenAI-style model aliases to the identifiers the GitHub Copilot API
// expects. Names not listed here are passed through to the API unchanged.
const MODEL_MAPPING = {
    'gpt-4': 'gpt-4o',
    'gpt-4o': 'gpt-4o',
    'gpt-4o-mini': 'gpt-4o-mini',
    'gpt-4-turbo': 'gpt-4o',
    'claude-3-5-sonnet': 'claude-3.5-sonnet',
    'claude-3.5-sonnet-20241022': 'claude-3.5-sonnet',
    'o1': 'o1',
    'o1-preview': 'o1-preview',
    'o1-mini': 'o1-mini',
};
21
+ function resolveModel(context, i) {
22
+ const modelSource = context.getNodeParameter('modelSource', i, 'fromList');
23
+ if (modelSource === 'custom') {
24
+ const custom = context.getNodeParameter('customModel', i);
25
+ if (!(custom === null || custom === void 0 ? void 0 : custom.trim()))
26
+ throw new Error("Custom model name is required when using 'Custom (Manual Entry)' mode");
27
+ console.log(`🔧 Using custom model: ${custom}`);
28
+ return custom;
29
+ }
30
+ const selected = context.getNodeParameter('model', i);
31
+ if (selected === '__manual__') {
32
+ const manual = context.getNodeParameter('customModel', i);
33
+ if (!(manual === null || manual === void 0 ? void 0 : manual.trim()))
34
+ throw new Error("Custom model name is required when selecting '✏️ Enter Custom Model Name'");
35
+ console.log(`✏️ Using manually entered model: ${manual}`);
36
+ return manual;
37
+ }
38
+ console.log(`📋 Using model from list: ${selected}`);
39
+ return selected;
40
+ }
41
/**
 * Returns true when any message carries image/file content that requires a
 * vision-capable model: an explicit message-level 'file'/'image' type, an
 * inline base64 image data URL (100+ base64 chars), a copilot-file://
 * reference, or an OpenAI-style multimodal content part.
 */
function detectVisionContent(messages) {
    const imageDataUrl = /^data:image\/[a-z]+;base64,[A-Za-z0-9+\/=]{100,}/i;
    return messages.some((msg) => {
        if (msg.type === 'file' || msg.type === 'image') {
            return true;
        }
        const content = msg.content;
        if (typeof content === 'string') {
            const trimmed = content.trim();
            return imageDataUrl.test(trimmed) || trimmed.startsWith('copilot-file://');
        }
        if (Array.isArray(content)) {
            return content.some((part) => part?.type === 'image_url' ||
                part?.type === 'image' ||
                !!part?.image_url ||
                part?.type === 'file');
        }
        return false;
    });
}
63
/**
 * Ensures the chosen model can handle image input.
 * Consults the API-backed capability cache first; when the model is unknown
 * there (null), falls back to the static model catalog. If the model lacks
 * vision support, either throws (fallback disabled) or returns the
 * configured vision fallback model.
 */
async function resolveVisionModel(context, copilotModel, advancedOptions, i) {
    const credentials = await context.getCredentials('githubCopilotApi');
    const oauthToken = credentials.oauthToken;
    let supportsVision = DynamicModelsManager_1.DynamicModelsManager.modelSupportsVision(oauthToken, copilotModel);
    if (supportsVision === null) {
        // Not in the API cache — consult the static model catalog instead.
        const modelInfo = GitHubCopilotModels_1.GitHubCopilotModelsManager.getModelByValue(copilotModel);
        supportsVision = !!(modelInfo?.capabilities?.vision || modelInfo?.capabilities?.multimodal);
        console.log(`👁️ Vision check for ${copilotModel}: static list, supported=${supportsVision}`);
    }
    else {
        console.log(`👁️ Vision check for ${copilotModel}: API cache, supported=${supportsVision}`);
    }
    if (supportsVision) {
        return copilotModel;
    }
    const enableFallback = advancedOptions.enableVisionFallback || false;
    if (!enableFallback) {
        throw new n8n_workflow_1.NodeOperationError(context.getNode(), `Model ${copilotModel} does not support vision. Enable "Vision Fallback" in Advanced Options and select a vision-capable model.`, { itemIndex: i });
    }
    const fallbackRaw = advancedOptions.visionFallbackModel;
    // '__manual__' means the user typed a custom fallback model name.
    const fallback = fallbackRaw === '__manual__'
        ? advancedOptions.visionFallbackCustomModel
        : fallbackRaw;
    if (!fallback?.trim()) {
        throw new n8n_workflow_1.NodeOperationError(context.getNode(), 'Vision fallback enabled but no fallback model was selected or provided.', { itemIndex: i });
    }
    console.log(`👁️ Model ${copilotModel} lacks vision - falling back to: ${fallback}`);
    return fallback;
}
92
/**
 * Determines the response_format to send to the API.
 * Precedence: an explicit response_format in a full JSON request body, then
 * the advanced-options value — which may be either a JSON object string such
 * as '{"type":"json_object"}' or a simple type name such as 'json_object'.
 * Returns undefined when the default ('text') is in effect.
 *
 * Fix: the original checked the simple type-name branch first, which made
 * the JSON-string branch unreachable (any JSON string was wrapped verbatim
 * as { type: '<raw json>' }) and emitted a spurious parse warning whenever
 * the UI value was the default 'text'.
 */
function parseResponseFormat(requestBodyFromJson, advancedOptions) {
    if (requestBodyFromJson?.response_format) {
        const rf = requestBodyFromJson.response_format;
        console.log('📋 response_format from JSON body:', JSON.stringify(rf));
        return rf;
    }
    const raw = advancedOptions.response_format;
    // A JSON object string takes priority over treating the value as a type name.
    if (typeof raw === 'string' && raw.trim().startsWith('{')) {
        try {
            const parsed = JSON.parse(raw);
            console.log('📋 response_format from advancedOptions:', JSON.stringify(parsed));
            return parsed;
        }
        catch {
            console.log('⚠️ Failed to parse response_format from advancedOptions');
            return undefined;
        }
    }
    const uiValue = raw || 'text';
    if (uiValue !== 'text') {
        console.log('📋 response_format from UI:', uiValue);
        return { type: uiValue };
    }
    return undefined;
}
115
/**
 * Strips a surrounding markdown code fence (``` or ```json) from model
 * output so json_object responses contain raw JSON. Content without a
 * complete fence is returned trimmed but otherwise untouched.
 */
function cleanJsonFromMarkdown(content) {
    const trimmed = content.trim();
    const fenced = /^```(?:json)?\s*\n([\s\S]*?)\n```\s*$/.exec(trimmed);
    return fenced?.[1] ? fenced[1].trim() : trimmed;
}
122
/**
 * Normalizes a Copilot chat response into the OpenAI chat.completion shape,
 * filling defaults for missing fields (id, object, created, usage) and
 * stripping markdown fences from content when json_object mode is active.
 * tool_calls and system_fingerprint are passed through only when present.
 */
function buildOpenAIResponse(response, model, response_format) {
    const wantsJson = response_format?.type === 'json_object';
    const choices = response.choices.map((choice, idx) => {
        const message = choice.message;
        console.log(`\n📝 Processing choice ${idx}: role=${message.role}`);
        const content = message.content != null && wantsJson
            ? cleanJsonFromMarkdown(message.content)
            : message.content;
        const choiceObj = {
            index: choice.index,
            message: {
                role: message.role,
                content,
                refusal: message.refusal || null,
                annotations: message.annotations || [],
            },
            logprobs: choice.logprobs || null,
            finish_reason: choice.finish_reason,
        };
        if (message.tool_calls?.length) {
            choiceObj.message.tool_calls = message.tool_calls;
        }
        return choiceObj;
    });
    const result = {
        id: response.id || `chatcmpl-${Date.now()}`,
        object: response.object || 'chat.completion',
        created: response.created || Math.floor(Date.now() / 1000),
        model,
        choices,
        usage: response.usage || { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
    };
    if (response.system_fingerprint) {
        result.system_fingerprint = response.system_fingerprint;
    }
    return result;
}
158
/**
 * Handles the "Chat Completion" operation for item `i`:
 * resolves the configured model, parses messages, applies advanced options,
 * performs a vision-capability check (with optional fallback) when image
 * content is present, sends the request to the Copilot chat-completions
 * endpoint, and returns the response in OpenAI chat.completion format.
 */
async function executeChatCompletion(context, items, i) {
    const model = resolveModel(context, i);
    const { messages, requestBodyFromJson } = await (0, parseMessages_1.parseMessages)(context, items, i);
    const advancedOptions = context.getNodeParameter('advancedOptions', i, {});
    // Sanitize max_tokens: treat 0/negative/NaN as "use the default".
    let max_tokens = advancedOptions.max_tokens || 4096;
    if (!max_tokens || max_tokens <= 0 || isNaN(max_tokens)) {
        max_tokens = 4096;
    }
    const temperature = advancedOptions.temperature ?? 1;
    const top_p = advancedOptions.top_p ?? 1;
    const frequency_penalty = advancedOptions.frequency_penalty ?? 0;
    const presence_penalty = advancedOptions.presence_penalty ?? 0;
    const seed = advancedOptions.seed || 0;
    const stream = advancedOptions.stream ?? false;
    const user = advancedOptions.user || undefined;
    const stop = advancedOptions.stop || undefined;
    // Tools may arrive as a ready-made array or as a JSON string from the UI.
    let parsedTools = [];
    const tools = advancedOptions.tools;
    if (tools) {
        try {
            if (Array.isArray(tools) && tools.length > 0) {
                parsedTools = tools;
            }
            else if (typeof tools === 'string' && tools.trim()) {
                const parsed = JSON.parse(tools);
                if (Array.isArray(parsed) && parsed.length > 0) {
                    parsedTools = parsed;
                }
            }
        }
        catch {
            console.log('⚠️ Failed to parse tools, ignoring');
        }
    }
    const response_format = parseResponseFormat(requestBodyFromJson, advancedOptions);
    // Translate OpenAI-style aliases to Copilot ids; unknown names pass through.
    let copilotModel = MODEL_MAPPING[model] || model;
    const hasVisionContent = detectVisionContent(messages);
    if (hasVisionContent) {
        copilotModel = await resolveVisionModel(context, copilotModel, advancedOptions, i);
    }
    const requestBody = {
        model: copilotModel,
        messages,
        stream,
        temperature,
        max_tokens,
    };
    // Optional parameters are included only when they differ from defaults.
    if (top_p !== 1) {
        requestBody.top_p = top_p;
    }
    if (frequency_penalty !== 0) {
        requestBody.frequency_penalty = frequency_penalty;
    }
    if (presence_penalty !== 0) {
        requestBody.presence_penalty = presence_penalty;
    }
    if (user) {
        requestBody.user = user;
    }
    if (stop) {
        // 'stop' may be a JSON array string or a plain stop sequence.
        try {
            requestBody.stop = JSON.parse(stop);
        }
        catch {
            requestBody.stop = stop;
        }
    }
    if (parsedTools.length > 0) {
        requestBody.tools = parsedTools;
        const tool_choice = advancedOptions.tool_choice || 'auto';
        if (tool_choice !== 'auto') {
            requestBody.tool_choice = tool_choice;
        }
    }
    if (response_format) {
        requestBody.response_format = response_format;
    }
    if (seed > 0) {
        requestBody.seed = seed;
    }
    console.log(`🚀 Sending request: model=${copilotModel}, messages=${messages.length}, vision=${hasVisionContent}`);
    let response;
    try {
        response = await (0, utils_1.makeApiRequest)(context, GitHubCopilotEndpoints_1.GITHUB_COPILOT_API.ENDPOINTS.CHAT_COMPLETIONS, requestBody, hasVisionContent);
    }
    catch (error) {
        const errorMsg = error instanceof Error ? error.message : String(error);
        throw new n8n_workflow_1.NodeOperationError(context.getNode(), `${errorMsg}\n\n🤖 Model used: ${copilotModel}`);
    }
    const retriesUsed = response._retryMetadata?.retries || 0;
    if (retriesUsed > 0) {
        console.log(`ℹ️ Request completed with ${retriesUsed} retry(ies)`);
    }
    return buildOpenAIResponse(response, model, response_format);
}
@@ -0,0 +1,2 @@
1
+ import { IDataObject, IExecuteFunctions } from 'n8n-workflow';
2
+ export declare function executeListModels(context: IExecuteFunctions, i: number): Promise<IDataObject>;
@@ -0,0 +1,15 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.executeListModels = executeListModels;
4
+ const GitHubCopilotEndpoints_1 = require("../../../shared/utils/GitHubCopilotEndpoints");
5
+ const modelsApi_1 = require("../utils/modelsApi");
6
/**
 * Handles the "List Models" operation: validates the stored GitHub token,
 * then returns the available Copilot models in OpenAI /v1/models format,
 * filtered by the 'modelsFilter' parameter (default: 'enabled').
 */
async function executeListModels(context, i) {
    const filter = context.getNodeParameter('modelsFilter', i, 'enabled');
    const credentials = await context.getCredentials('githubCopilotApi', i);
    const token = credentials.token;
    if (!token) {
        throw new Error(GitHubCopilotEndpoints_1.GITHUB_COPILOT_API.ERRORS.CREDENTIALS_REQUIRED);
    }
    if (!GitHubCopilotEndpoints_1.GitHubCopilotEndpoints.validateToken(token)) {
        throw new Error(GitHubCopilotEndpoints_1.GITHUB_COPILOT_API.ERRORS.INVALID_TOKEN);
    }
    return (0, modelsApi_1.fetchModelsOpenAIFormat)(token, filter);
}
@@ -0,0 +1,17 @@
1
+ import { IDataObject, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
2
+ export interface ParsedMessages {
3
+ messages: Array<{
4
+ role: string;
5
+ content: any;
6
+ }>;
7
+ requestBodyFromJson: IDataObject | undefined;
8
+ }
9
+ export declare function validateMessages(messages: Array<{
10
+ role: string;
11
+ content: any;
12
+ }>, context: IExecuteFunctions, itemIndex: number): void;
13
+ export declare function normalizeMessages(messages: Array<{
14
+ role: string;
15
+ content: any;
16
+ }>): void;
17
+ export declare function parseMessages(context: IExecuteFunctions, items: INodeExecutionData[], i: number): Promise<ParsedMessages>;
@@ -0,0 +1,149 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.validateMessages = validateMessages;
4
+ exports.normalizeMessages = normalizeMessages;
5
+ exports.parseMessages = parseMessages;
6
+ const n8n_workflow_1 = require("n8n-workflow");
7
+ const utils_1 = require("../../GitHubCopilotChatAPI/utils");
8
+ function parseJsonMode(messagesJson) {
9
+ let parsed;
10
+ if (typeof messagesJson === 'object') {
11
+ parsed = messagesJson;
12
+ console.log('📥 Received messages as direct object/array (no parsing needed)');
13
+ }
14
+ else {
15
+ parsed = JSON.parse(messagesJson);
16
+ console.log('📥 Parsed messages from JSON string');
17
+ }
18
+ if (Array.isArray(parsed)) {
19
+ return { messages: parsed, requestBodyFromJson: undefined };
20
+ }
21
+ else if (parsed.messages && Array.isArray(parsed.messages)) {
22
+ console.log('📥 Full OpenAI request body received:', JSON.stringify(parsed, null, 2));
23
+ return { messages: parsed.messages, requestBodyFromJson: parsed };
24
+ }
25
+ return { messages: parsed, requestBodyFromJson: undefined };
26
+ }
27
/**
 * Resolves an n8n binary property into an OpenAI image_url content part and
 * rewrites `message` in place: role forced to 'user', content becomes an
 * array of [optional text part, image_url part], message-level type removed.
 * Non-image MIME types are sniffed from the buffer and, as a last resort,
 * forced to image/jpeg for schema compliance.
 */
async function processBinaryMessage(context, items, msg, message, itemIndex) {
    const keyToUse = msg.binaryPropertyName || 'data';
    const binaryKeyData = items[itemIndex].binary;
    if (!binaryKeyData?.[keyToUse]) {
        const available = binaryKeyData ? Object.keys(binaryKeyData).join(', ') : 'none';
        throw new n8n_workflow_1.NodeOperationError(context.getNode(), `Binary property '${keyToUse}' not found. Available binary properties: ${available}`, { itemIndex });
    }
    try {
        const binaryData = binaryKeyData[keyToUse];
        let mimeType = binaryData.mimeType || 'application/octet-stream';
        const buffer = await context.helpers.getBinaryDataBuffer(itemIndex, keyToUse);
        if (!mimeType.startsWith('image/')) {
            // Declared type is not an image — try sniffing the buffer contents.
            const detected = (0, utils_1.getImageMimeType)(buffer);
            if (detected !== 'application/octet-stream') {
                mimeType = detected;
            }
            else {
                console.warn(`⚠️ Could not detect image type for '${keyToUse}', using image/jpeg fallback`);
                if (mimeType === 'application/octet-stream') {
                    mimeType = 'image/jpeg';
                }
            }
        }
        if (!mimeType.startsWith('image/')) {
            // e.g. a declared non-image type (application/pdf) that failed sniffing.
            console.warn(`⚠️ Forcing '${mimeType}' to 'image/jpeg' for schema compliance`);
            mimeType = 'image/jpeg';
        }
        const dataUrl = `data:${mimeType};base64,${buffer.toString('base64')}`;
        const contentArray = [];
        const caption = msg.caption;
        if (caption?.trim()) {
            contentArray.push({ type: 'text', text: caption });
        }
        else if (message.content?.trim() && message.content !== '[object Object]') {
            contentArray.push({ type: 'text', text: message.content });
        }
        contentArray.push({ type: 'image_url', image_url: { url: dataUrl, detail: 'auto' } });
        message.role = 'user';
        message.content = contentArray;
        delete message.type;
        console.log(`📎 Attached binary file '${keyToUse}' (${mimeType}) as image_url`);
    }
    catch (err) {
        if (err instanceof n8n_workflow_1.NodeOperationError) {
            throw err;
        }
        const errorMessage = err instanceof Error ? err.message : String(err);
        throw new n8n_workflow_1.NodeOperationError(context.getNode(), `Failed to read binary file '${keyToUse}': ${errorMessage}`, { itemIndex });
    }
}
78
/**
 * Builds the message list from the manual UI builder. Each entry keeps its
 * configured role/content; 'file_binary' entries are expanded into image_url
 * content parts from the item's binary data, and 'file' entries keep a
 * message-level type marker for the Copilot file format.
 */
async function parseManualMode(context, items, i) {
    const messagesParam = context.getNodeParameter('messages', i, { message: [] });
    console.log('📥 Manual mode - messagesParam:', JSON.stringify(messagesParam, null, 2));
    const messages = [];
    const entries = Array.isArray(messagesParam.message) ? messagesParam.message : [];
    for (const msg of entries) {
        const message = { role: msg.role, content: msg.content };
        const msgType = msg.type || 'text';
        if (msgType === 'file_binary') {
            await processBinaryMessage(context, items, msg, message, i);
        }
        else if (msgType === 'file') {
            message.type = 'file';
        }
        messages.push(message);
    }
    console.log('📥 Manual mode - parsed messages:', JSON.stringify(messages, null, 2));
    return messages;
}
101
/**
 * Rejects OpenAI-style { type: 'file' } entries inside a content array:
 * GitHub Copilot expects file attachments at the message level instead.
 * Throws NodeOperationError with a corrected-format example.
 */
function validateMessages(messages, context, itemIndex) {
    for (const msg of messages) {
        if (!Array.isArray(msg.content)) {
            continue;
        }
        for (const part of msg.content) {
            if (part.type === 'file') {
                throw new n8n_workflow_1.NodeOperationError(context.getNode(), `❌ GitHub Copilot API Error: File attachments cannot be used inside 'content' array.\n\n` +
                    `✅ CORRECT FORMAT (GitHub Copilot - message level):\n` +
                    `[{"role": "user", "content": "data:image/png;base64,...", "type": "file"}]`, { itemIndex });
            }
        }
    }
}
114
+ function normalizeMessages(messages) {
115
+ for (let idx = 0; idx < messages.length; idx++) {
116
+ const msg = messages[idx];
117
+ if (msg.content !== null &&
118
+ msg.content !== undefined &&
119
+ typeof msg.content === 'object' &&
120
+ !Array.isArray(msg.content)) {
121
+ msg.content = JSON.stringify(msg.content, null, 2);
122
+ console.log(`🔄 Auto-converted message[${idx}].content from object to JSON string`);
123
+ }
124
+ }
125
+ }
126
/**
 * Entry point: resolves the messages for item `i` from either the raw-JSON
 * input mode or the manual message builder. Guarantees at least one message,
 * validates Copilot-specific constraints, and normalizes object content to
 * JSON strings before returning.
 */
async function parseMessages(context, items, i) {
    const inputMode = context.getNodeParameter('messagesInputMode', i, 'manual');
    let result;
    if (inputMode === 'json') {
        const messagesJson = context.getNodeParameter('messagesJson', i, '[]');
        try {
            result = parseJsonMode(messagesJson);
        }
        catch (error) {
            const reason = error instanceof Error ? error.message : 'Unknown error';
            throw new Error(`Failed to parse messages JSON: ${reason}`);
        }
    }
    else {
        const messages = await parseManualMode(context, items, i);
        result = { messages, requestBodyFromJson: undefined };
    }
    if (result.messages.length === 0) {
        // Never send an empty conversation to the API.
        result.messages.push({ role: 'user', content: 'Hello! How can you help me?' });
    }
    console.log('📤 Final messages being sent to API:', JSON.stringify(result.messages, null, 2));
    validateMessages(result.messages, context, i);
    normalizeMessages(result.messages);
    return result;
}