n8n-nodes-github-copilot 3.38.25 → 3.38.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/credentials/GitHubCopilotApi.credentials.d.ts +1 -1
- package/dist/credentials/GitHubCopilotApi.credentials.js +25 -25
- package/dist/nodes/GitHubCopilot/GitHubCopilot.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilot/GitHubCopilot.node.js +166 -166
- package/dist/nodes/GitHubCopilotAuthHelper/GitHubCopilotAuthHelper.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilotAuthHelper/GitHubCopilotAuthHelper.node.js +539 -539
- package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.js +46 -44
- package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.d.ts +1 -1
- package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.js +82 -82
- package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.d.ts +2 -2
- package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.js +26 -26
- package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.d.ts +2 -2
- package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.js +12 -12
- package/dist/nodes/GitHubCopilotChatAPI/utils/index.d.ts +4 -4
- package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.d.ts +3 -3
- package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.js +19 -19
- package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.d.ts +1 -1
- package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.js +23 -23
- package/dist/nodes/GitHubCopilotChatAPI/utils/types.d.ts +5 -5
- package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.d.ts +2 -2
- package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.js +198 -125
- package/dist/nodes/GitHubCopilotEmbeddings/GitHubCopilotEmbeddings.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilotEmbeddings/GitHubCopilotEmbeddings.node.js +114 -114
- package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.js +74 -69
- package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.d.ts +1 -1
- package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.js +181 -181
- package/dist/nodes/GitHubCopilotOpenAI/utils/index.d.ts +2 -2
- package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.d.ts +10 -10
- package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.js +53 -53
- package/dist/nodes/GitHubCopilotOpenAI/utils/types.d.ts +12 -12
- package/dist/nodes/GitHubCopilotSpeech/GitHubCopilotSpeech.node.d.ts +10 -4
- package/dist/nodes/GitHubCopilotSpeech/GitHubCopilotSpeech.node.js +8 -7
- package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.d.ts +6 -1
- package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.js +253 -116
- package/dist/package.json +1 -1
- package/dist/shared/models/GitHubCopilotModels.js +19 -2
- package/dist/shared/models/ModelVersionRequirements.js +5 -0
- package/dist/shared/utils/DynamicModelsManager.d.ts +9 -0
- package/dist/shared/utils/DynamicModelsManager.js +48 -6
- package/dist/shared/utils/FileChunkingApiUtils.d.ts +2 -2
- package/dist/shared/utils/FileChunkingApiUtils.js +15 -15
- package/dist/shared/utils/FileOptimizationUtils.d.ts +2 -2
- package/dist/shared/utils/FileOptimizationUtils.js +20 -20
- package/dist/shared/utils/GitHubCopilotApiUtils.js +6 -2
- package/dist/shared/utils/GitHubCopilotEndpoints.js +10 -1
- package/package.json +1 -1

package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.js

```diff
@@ -7,39 +7,39 @@ exports.parseOpenAIRequest = parseOpenAIRequest;
 exports.debugLog = debugLog;
 function mapOpenAIModelToCopilot(openaiModel) {
     const modelMappings = {
-
-
-
-
-
-
-
-
-
-
-
-
+        'gpt-4': 'gpt-4o',
+        'gpt-4o': 'gpt-4o',
+        'gpt-4o-mini': 'gpt-4o-mini',
+        'gpt-4-turbo': 'gpt-4o',
+        'gpt-3.5-turbo': 'gpt-4o-mini',
+        'claude-3-5-sonnet': 'claude-3.5-sonnet',
+        'claude-3-haiku': 'claude-3-haiku',
+        'claude-3-opus': 'claude-3-opus',
+        'gemini-1.5-pro': 'gemini-1.5-pro',
+        'gemini-1.5-flash': 'gemini-1.5-flash',
+        'o1-preview': 'o1-preview',
+        'o1-mini': 'o1-mini',
     };
-    return modelMappings[openaiModel] ||
+    return modelMappings[openaiModel] || 'gpt-4o';
 }
 function convertOpenAIMessagesToCopilot(messages) {
-    let systemMessage =
+    let systemMessage = '';
     const userMessages = [];
     const assistantMessages = [];
     for (const msg of messages) {
         switch (msg.role) {
-            case
-                systemMessage += (systemMessage ?
+            case 'system':
+                systemMessage += (systemMessage ? '\n\n' : '') + msg.content;
                 break;
-            case
+            case 'user':
                 userMessages.push(msg.content);
                 break;
-            case
+            case 'assistant':
                 assistantMessages.push(msg.content);
                 break;
         }
     }
-    let conversationContext =
+    let conversationContext = '';
     const maxLength = Math.max(userMessages.length, assistantMessages.length);
     for (let i = 0; i < maxLength - 1; i++) {
         if (i < userMessages.length - 1) {
@@ -49,7 +49,7 @@ function convertOpenAIMessagesToCopilot(messages) {
             conversationContext += `Assistant: ${assistantMessages[i]}\n`;
         }
     }
-    const finalUserMessage = userMessages[userMessages.length - 1] ||
+    const finalUserMessage = userMessages[userMessages.length - 1] || '';
     const message = conversationContext
         ? `${conversationContext}\nUser: ${finalUserMessage}`
         : finalUserMessage;
@@ -62,14 +62,14 @@ function convertCopilotResponseToOpenAI(copilotResponse, model) {
     const timestamp = Math.floor(Date.now() / 1000);
     return {
         id: `chatcmpl-${timestamp}-${Math.random().toString(36).substr(2, 9)}`,
-        object:
+        object: 'chat.completion',
         created: timestamp,
         model: model,
         choices: [
             {
                 index: 0,
                 message: {
-                    role:
+                    role: 'assistant',
                     content: copilotResponse.message,
                     tool_calls: copilotResponse.tool_calls,
                 },
@@ -85,39 +85,39 @@ function convertCopilotResponseToOpenAI(copilotResponse, model) {
 }
 function mapFinishReason(copilotReason) {
     switch (copilotReason) {
-        case
-        case
-            return
-        case
-        case
-            return
-        case
-        case
-            return
-        case
-        case
-            return
+        case 'stop':
+        case 'end_turn':
+            return 'stop';
+        case 'max_tokens':
+        case 'length':
+            return 'length';
+        case 'tool_calls':
+        case 'function_call':
+            return 'tool_calls';
+        case 'content_filter':
+        case 'safety':
+            return 'content_filter';
         default:
-            return
+            return 'stop';
     }
 }
 function parseOpenAIRequest(context, itemIndex) {
-    const model = context.getNodeParameter(
-    const messagesParam = context.getNodeParameter(
+    const model = context.getNodeParameter('model', itemIndex, 'gpt-4o');
+    const messagesParam = context.getNodeParameter('messages', itemIndex, {
         message: [],
     });
-    const tools = context.getNodeParameter(
-    const toolChoice = context.getNodeParameter(
-    const responseFormat = context.getNodeParameter(
-    const temperature = context.getNodeParameter(
-    const maxTokens = context.getNodeParameter(
-    const topP = context.getNodeParameter(
-    const frequencyPenalty = context.getNodeParameter(
-    const presencePenalty = context.getNodeParameter(
-    const stop = context.getNodeParameter(
-    const stream = context.getNodeParameter(
-    const seed = context.getNodeParameter(
-    const user = context.getNodeParameter(
+    const tools = context.getNodeParameter('tools', itemIndex, '');
+    const toolChoice = context.getNodeParameter('tool_choice', itemIndex, 'auto');
+    const responseFormat = context.getNodeParameter('response_format', itemIndex, 'text');
+    const temperature = context.getNodeParameter('temperature', itemIndex, 1);
+    const maxTokens = context.getNodeParameter('max_tokens', itemIndex, '');
+    const topP = context.getNodeParameter('top_p', itemIndex, 1);
+    const frequencyPenalty = context.getNodeParameter('frequency_penalty', itemIndex, 0);
+    const presencePenalty = context.getNodeParameter('presence_penalty', itemIndex, 0);
+    const stop = context.getNodeParameter('stop', itemIndex, '');
+    const stream = context.getNodeParameter('stream', itemIndex, false);
+    const seed = context.getNodeParameter('seed', itemIndex, '');
+    const user = context.getNodeParameter('user', itemIndex, '');
     const messages = [];
     if (messagesParam.message && Array.isArray(messagesParam.message)) {
         for (const msg of messagesParam.message) {
@@ -142,10 +142,10 @@ function parseOpenAIRequest(context, itemIndex) {
             request.tool_choice = toolChoice;
         }
         catch (error) {
-            throw new Error(`Invalid tools JSON: ${error instanceof Error ? error.message :
+            throw new Error(`Invalid tools JSON: ${error instanceof Error ? error.message : 'Unknown error'}`);
         }
     }
-    if (responseFormat !==
+    if (responseFormat !== 'text') {
         request.response_format = { type: responseFormat };
     }
     if (maxTokens) {
@@ -168,8 +168,8 @@ function parseOpenAIRequest(context, itemIndex) {
     return request;
 }
 function debugLog(context, itemIndex, message, data) {
-    const advancedOptions = context.getNodeParameter(
+    const advancedOptions = context.getNodeParameter('advancedOptions', itemIndex, {});
     if (advancedOptions.debugMode) {
-        console.log(`[GitHub Copilot OpenAI Debug] ${message}`, data ? JSON.stringify(data, null, 2) :
+        console.log(`[GitHub Copilot OpenAI Debug] ${message}`, data ? JSON.stringify(data, null, 2) : '');
     }
 }
```
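
The + side of the first hunk restores the OpenAI-to-Copilot model alias table and its `'gpt-4o'` fallback. As a quick illustration of that behavior, here is a standalone sketch that mirrors the dist code above (it is not an import of the package's internals):

```typescript
// Standalone sketch of the model aliasing restored above: OpenAI-style names
// map to Copilot model IDs, with 'gpt-4o' as the fallback for anything unknown.
const modelMappings: Record<string, string> = {
  'gpt-4': 'gpt-4o',
  'gpt-4o': 'gpt-4o',
  'gpt-4o-mini': 'gpt-4o-mini',
  'gpt-4-turbo': 'gpt-4o',
  'gpt-3.5-turbo': 'gpt-4o-mini',
  'claude-3-5-sonnet': 'claude-3.5-sonnet',
  'claude-3-haiku': 'claude-3-haiku',
  'claude-3-opus': 'claude-3-opus',
  'gemini-1.5-pro': 'gemini-1.5-pro',
  'gemini-1.5-flash': 'gemini-1.5-flash',
  'o1-preview': 'o1-preview',
  'o1-mini': 'o1-mini',
};

function mapOpenAIModelToCopilot(openaiModel: string): string {
  return modelMappings[openaiModel] || 'gpt-4o';
}

// mapOpenAIModelToCopilot('gpt-3.5-turbo')  -> 'gpt-4o-mini'
// mapOpenAIModelToCopilot('o1-mini')        -> 'o1-mini'
// mapOpenAIModelToCopilot('some-new-model') -> 'gpt-4o' (fallback)
```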
package/dist/nodes/GitHubCopilotOpenAI/utils/types.d.ts

```diff
@@ -1,8 +1,8 @@
-import { IDataObject, IExecuteFunctions } from
-import { CopilotResponse } from
+import { IDataObject, IExecuteFunctions } from 'n8n-workflow';
+import { CopilotResponse } from '../../../shared/utils/GitHubCopilotApiUtils';
 export { CopilotResponse };
 export interface OpenAIMessage {
-    role:
+    role: 'system' | 'user' | 'assistant' | 'tool';
     content: string;
     name?: string;
     tool_calls?: ToolCall[];
@@ -10,14 +10,14 @@ export interface OpenAIMessage {
 }
 export interface ToolCall {
     id: string;
-    type:
+    type: 'function';
     function: {
         name: string;
         arguments: string;
     };
 }
 export interface OpenAITool {
-    type:
+    type: 'function';
     function: {
         name: string;
         description: string;
@@ -28,14 +28,14 @@ export interface OpenAIRequest {
     model: string;
     messages: OpenAIMessage[];
     tools?: OpenAITool[];
-    tool_choice?:
-        type:
+    tool_choice?: 'auto' | 'none' | 'required' | {
+        type: 'function';
         function: {
             name: string;
         };
     };
     response_format?: {
-        type:
+        type: 'text' | 'json_object';
     };
     temperature?: number;
     max_tokens?: number;
@@ -49,17 +49,17 @@ export interface OpenAIRequest {
 }
 export interface OpenAIResponse {
     id: string;
-    object:
+    object: 'chat.completion';
     created: number;
     model: string;
     choices: Array<{
         index: number;
         message: {
-            role:
+            role: 'assistant';
             content: string | null;
             tool_calls?: ToolCall[];
         };
-        finish_reason:
+        finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter';
     }>;
     usage: {
         prompt_tokens: number;
@@ -70,7 +70,7 @@ export interface OpenAIResponse {
 export interface FileProcessOptions {
     context: IExecuteFunctions;
     itemIndex: number;
-    source:
+    source: 'manual' | 'url' | 'binary';
     filePath?: string;
     url?: string;
     binaryProperty?: string;
```
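
With the literal unions restored in `types.d.ts`, request objects are constrained to the values shown above. A minimal sketch of a conforming payload, using only fields visible in these hunks (the interface names and trimmed fields here are illustrative, not the package's exports):

```typescript
// Sketch of a payload that satisfies the restored literal unions in OpenAIRequest.
type ToolChoice =
  | 'auto'
  | 'none'
  | 'required'
  | { type: 'function'; function: { name: string } };

interface RequestSketch {
  model: string;
  messages: Array<{ role: 'system' | 'user' | 'assistant' | 'tool'; content: string }>;
  tool_choice?: ToolChoice;
  response_format?: { type: 'text' | 'json_object' };
  temperature?: number;
  max_tokens?: number;
}

const example: RequestSketch = {
  model: 'gpt-4o',
  messages: [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'Summarize the last workflow run.' },
  ],
  tool_choice: 'auto', // or { type: 'function', function: { name: 'myTool' } }
  response_format: { type: 'json_object' },
  temperature: 0.2,
};
```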
package/dist/nodes/GitHubCopilotSpeech/GitHubCopilotSpeech.node.d.ts

```diff
@@ -1,8 +1,14 @@
-import { IExecuteFunctions,
+import { IExecuteFunctions, INodeType, INodeTypeDescription, INodeExecutionData } from 'n8n-workflow';
+interface ISpeechOptions {
+    temperature?: number;
+    maxTokens?: number;
+    timeout?: number;
+}
 export declare class GitHubCopilotSpeech implements INodeType {
     description: INodeTypeDescription;
     execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
-
-
-
+    static transcribeWithMicrosoftSpeech(audioBuffer: Buffer, format: string, language: string, oauthToken: string, options: ISpeechOptions, context: IExecuteFunctions): Promise<string>;
+    static detectAudioFormat(buffer: Buffer): string;
+    static isSupportedFormat(format: string): boolean;
 }
+export {};
```
package/dist/nodes/GitHubCopilotSpeech/GitHubCopilotSpeech.node.js

```diff
@@ -212,8 +212,7 @@ class GitHubCopilotSpeech {
         const returnData = [];
         const operation = this.getNodeParameter('operation', 0);
         const audioSource = this.getNodeParameter('audioSource', 0);
-        const credentials = await this.getCredentials('gitHubCopilotApi');
-        const tokenManager = new OAuthTokenManager_1.OAuthTokenManager();
+        const credentials = (await this.getCredentials('gitHubCopilotApi'));
         for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
             try {
                 let audioBuffer;
@@ -245,7 +244,9 @@ class GitHubCopilotSpeech {
                 const language = this.getNodeParameter('language', itemIndex);
                 const formatParam = this.getNodeParameter('audioFormat', itemIndex);
                 const options = this.getNodeParameter('options', itemIndex);
-                const actualFormat = formatParam === 'auto'
+                const actualFormat = formatParam === 'auto'
+                    ? GitHubCopilotSpeech.detectAudioFormat(audioBuffer)
+                    : formatParam;
                 if (!GitHubCopilotSpeech.isSupportedFormat(actualFormat)) {
                     throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Unsupported audio format: ${actualFormat}. Supported: wav, mp3, m4a, flac, ogg`);
                 }
@@ -297,19 +298,19 @@ class GitHubCopilotSpeech {
     static async transcribeWithMicrosoftSpeech(audioBuffer, format, language, oauthToken, options, context) {
         const endpoint = 'https://speech.microsoft.com/speech/recognition/conversation/cognitiveservices/v1';
         const headers = {
-
+            Authorization: `Bearer ${oauthToken}`,
             'User-Agent': 'GitHub-Copilot/1.0 (n8n-node)',
             'Editor-Version': 'vscode/1.95.0',
             'Editor-Plugin-Version': 'copilot/1.0.0',
             'Content-Type': `audio/${format}; codecs=audio/pcm; samplerate=16000`,
-
+            Accept: 'application/json',
         };
         if (language !== 'auto') {
             headers['Accept-Language'] = language;
         }
         const timeout = (options === null || options === void 0 ? void 0 : options.timeout) || 30;
         try {
-
+            await context.helpers.httpRequest({
                 method: 'POST',
                 url: endpoint,
                 headers,
@@ -327,7 +328,7 @@ class GitHubCopilotSpeech {
         if (buffer.length >= 12 && buffer.toString('ascii', 0, 4) === 'RIFF') {
             return 'wav';
         }
-        if (buffer.length >= 3 && buffer[0] ===
+        if (buffer.length >= 3 && buffer[0] === 0xff && (buffer[1] & 0xe0) === 0xe0) {
             return 'mp3';
         }
         if (buffer.length >= 12 && buffer.toString('ascii', 4, 8) === 'ftyp') {
```
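
The last hunk shows the magic-byte checks `detectAudioFormat` applies when `audioFormat` is set to `'auto'`: a `RIFF` header means WAV, an MPEG frame sync (0xFF followed by 0b111xxxxx) means MP3, and an `ftyp` box at offset 4 marks an MP4/M4A container. A sketch of that logic follows; the diff is cut off after the `ftyp` check, so the `'m4a'` return value and the final fallback are assumptions:

```typescript
// Sketch of the magic-byte detection visible in the hunk above (Node.js Buffer).
function detectAudioFormat(buffer: Buffer): string {
  if (buffer.length >= 12 && buffer.toString('ascii', 0, 4) === 'RIFF') {
    return 'wav'; // RIFF/WAVE container
  }
  if (buffer.length >= 3 && buffer[0] === 0xff && (buffer[1] & 0xe0) === 0xe0) {
    return 'mp3'; // MPEG audio frame sync
  }
  if (buffer.length >= 12 && buffer.toString('ascii', 4, 8) === 'ftyp') {
    return 'm4a'; // assumed: the hunk ends before this return
  }
  return 'wav'; // assumed default: not shown in the hunk
}
```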
package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.d.ts

```diff
@@ -1,5 +1,10 @@
-import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription } from
+import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription, ILoadOptionsFunctions, INodePropertyOptions } from 'n8n-workflow';
 export declare class GitHubCopilotTest implements INodeType {
     description: INodeTypeDescription;
+    methods: {
+        loadOptions: {
+            getModels(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]>;
+        };
+    };
     execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
 }
```
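
The new `methods.loadOptions.getModels` declaration means the Test node now populates its model dropdown through n8n's loadOptions mechanism. The implementation is not part of this diff; the sketch below only illustrates the return shape n8n expects (`INodePropertyOptions` entries), with a hypothetical static list standing in for whatever the node actually fetches (likely via `DynamicModelsManager`, also updated in this release):

```typescript
// Shape-only sketch of a loadOptions method; the real getModels in
// GitHubCopilotTest.node.js is not shown in this diff.
import type { ILoadOptionsFunctions, INodePropertyOptions } from 'n8n-workflow';

async function getModels(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
  // Placeholder entries; the node presumably resolves the list dynamically.
  return [
    { name: 'GPT-4o', value: 'gpt-4o' },
    { name: 'GPT-4o Mini', value: 'gpt-4o-mini' },
    { name: 'Claude 3.5 Sonnet', value: 'claude-3.5-sonnet' },
  ];
}
```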
|