n8n-nodes-github-copilot 3.38.25 → 3.38.26

This diff shows the changes between two publicly released versions of this package as they appear in a supported public registry. It is provided for informational purposes only.
Files changed (36)
  1. package/dist/credentials/GitHubCopilotApi.credentials.d.ts +1 -1
  2. package/dist/credentials/GitHubCopilotApi.credentials.js +25 -25
  3. package/dist/nodes/GitHubCopilot/GitHubCopilot.node.d.ts +1 -1
  4. package/dist/nodes/GitHubCopilot/GitHubCopilot.node.js +166 -166
  5. package/dist/nodes/GitHubCopilotAuthHelper/GitHubCopilotAuthHelper.node.d.ts +1 -1
  6. package/dist/nodes/GitHubCopilotAuthHelper/GitHubCopilotAuthHelper.node.js +539 -539
  7. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.d.ts +1 -1
  8. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.js +46 -44
  9. package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.d.ts +1 -1
  10. package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.js +82 -82
  11. package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.d.ts +2 -2
  12. package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.js +26 -26
  13. package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.d.ts +2 -2
  14. package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.js +12 -12
  15. package/dist/nodes/GitHubCopilotChatAPI/utils/index.d.ts +4 -4
  16. package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.d.ts +3 -3
  17. package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.js +19 -19
  18. package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.d.ts +1 -1
  19. package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.js +23 -23
  20. package/dist/nodes/GitHubCopilotChatAPI/utils/types.d.ts +5 -5
  21. package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.d.ts +1 -1
  22. package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.js +115 -106
  23. package/dist/nodes/GitHubCopilotEmbeddings/GitHubCopilotEmbeddings.node.d.ts +1 -1
  24. package/dist/nodes/GitHubCopilotEmbeddings/GitHubCopilotEmbeddings.node.js +114 -114
  25. package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.d.ts +1 -1
  26. package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.js +74 -69
  27. package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.d.ts +1 -1
  28. package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.js +181 -181
  29. package/dist/nodes/GitHubCopilotOpenAI/utils/index.d.ts +2 -2
  30. package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.d.ts +10 -10
  31. package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.js +53 -53
  32. package/dist/nodes/GitHubCopilotOpenAI/utils/types.d.ts +12 -12
  33. package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.d.ts +1 -1
  34. package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.js +120 -116
  35. package/dist/package.json +1 -1
  36. package/package.json +1 -1
package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.js

@@ -7,39 +7,39 @@ exports.parseOpenAIRequest = parseOpenAIRequest;
  exports.debugLog = debugLog;
  function mapOpenAIModelToCopilot(openaiModel) {
  const modelMappings = {
- "gpt-4": "gpt-4o",
- "gpt-4o": "gpt-4o",
- "gpt-4o-mini": "gpt-4o-mini",
- "gpt-4-turbo": "gpt-4o",
- "gpt-3.5-turbo": "gpt-4o-mini",
- "claude-3-5-sonnet": "claude-3.5-sonnet",
- "claude-3-haiku": "claude-3-haiku",
- "claude-3-opus": "claude-3-opus",
- "gemini-1.5-pro": "gemini-1.5-pro",
- "gemini-1.5-flash": "gemini-1.5-flash",
- "o1-preview": "o1-preview",
- "o1-mini": "o1-mini",
+ 'gpt-4': 'gpt-4o',
+ 'gpt-4o': 'gpt-4o',
+ 'gpt-4o-mini': 'gpt-4o-mini',
+ 'gpt-4-turbo': 'gpt-4o',
+ 'gpt-3.5-turbo': 'gpt-4o-mini',
+ 'claude-3-5-sonnet': 'claude-3.5-sonnet',
+ 'claude-3-haiku': 'claude-3-haiku',
+ 'claude-3-opus': 'claude-3-opus',
+ 'gemini-1.5-pro': 'gemini-1.5-pro',
+ 'gemini-1.5-flash': 'gemini-1.5-flash',
+ 'o1-preview': 'o1-preview',
+ 'o1-mini': 'o1-mini',
  };
- return modelMappings[openaiModel] || "gpt-4o";
+ return modelMappings[openaiModel] || 'gpt-4o';
  }
  function convertOpenAIMessagesToCopilot(messages) {
- let systemMessage = "";
+ let systemMessage = '';
  const userMessages = [];
  const assistantMessages = [];
  for (const msg of messages) {
  switch (msg.role) {
- case "system":
- systemMessage += (systemMessage ? "\n\n" : "") + msg.content;
+ case 'system':
+ systemMessage += (systemMessage ? '\n\n' : '') + msg.content;
  break;
- case "user":
+ case 'user':
  userMessages.push(msg.content);
  break;
- case "assistant":
+ case 'assistant':
  assistantMessages.push(msg.content);
  break;
  }
  }
- let conversationContext = "";
+ let conversationContext = '';
  const maxLength = Math.max(userMessages.length, assistantMessages.length);
  for (let i = 0; i < maxLength - 1; i++) {
  if (i < userMessages.length - 1) {
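
Every change in this release is a quote-style switch (double quotes to single quotes) in the compiled output; behaviour is unchanged. For context, the hunk above covers mapOpenAIModelToCopilot, which resolves OpenAI-style model names to Copilot model IDs and falls back to 'gpt-4o' for anything unrecognised. A minimal sketch of that lookup, trimmed to a few entries and not taken verbatim from the package:

// Sketch of the mapping behaviour shown above (table trimmed; assumed simplification).
const modelMappings: Record<string, string> = {
    'gpt-4': 'gpt-4o',
    'gpt-3.5-turbo': 'gpt-4o-mini',
    'claude-3-5-sonnet': 'claude-3.5-sonnet',
};

function mapOpenAIModelToCopilot(openaiModel: string): string {
    // Unknown model names fall back to 'gpt-4o'.
    return modelMappings[openaiModel] || 'gpt-4o';
}

mapOpenAIModelToCopilot('gpt-3.5-turbo');     // 'gpt-4o-mini'
mapOpenAIModelToCopilot('some-future-model'); // 'gpt-4o'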
@@ -49,7 +49,7 @@ function convertOpenAIMessagesToCopilot(messages) {
  conversationContext += `Assistant: ${assistantMessages[i]}\n`;
  }
  }
- const finalUserMessage = userMessages[userMessages.length - 1] || "";
+ const finalUserMessage = userMessages[userMessages.length - 1] || '';
  const message = conversationContext
  ? `${conversationContext}\nUser: ${finalUserMessage}`
  : finalUserMessage;
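
Taken together, the two hunks above show how convertOpenAIMessagesToCopilot flattens an OpenAI chat array: system messages are concatenated into a single system prompt, earlier user/assistant turns are rendered as a plain-text transcript, and the last user message becomes the actual prompt. Roughly, for an input like the following (illustrative values; the output shape is inferred from the hunks):

const messages = [
    { role: 'system', content: 'Be terse.' },
    { role: 'user', content: 'Hi' },
    { role: 'assistant', content: 'Hello!' },
    { role: 'user', content: 'Summarise the report' },
];
// systemMessage -> 'Be terse.'
// message       -> 'User: Hi\nAssistant: Hello!\n\nUser: Summarise the report'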
@@ -62,14 +62,14 @@ function convertCopilotResponseToOpenAI(copilotResponse, model) {
  const timestamp = Math.floor(Date.now() / 1000);
  return {
  id: `chatcmpl-${timestamp}-${Math.random().toString(36).substr(2, 9)}`,
- object: "chat.completion",
+ object: 'chat.completion',
  created: timestamp,
  model: model,
  choices: [
  {
  index: 0,
  message: {
- role: "assistant",
+ role: 'assistant',
  content: copilotResponse.message,
  tool_calls: copilotResponse.tool_calls,
  },
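
convertCopilotResponseToOpenAI wraps the Copilot reply in an OpenAI chat.completion envelope, as the hunk above shows. A response it produces looks roughly like this (field names come from the hunk and from the OpenAIResponse interface further down; the concrete values are illustrative):

const exampleResponse = {
    id: 'chatcmpl-1718000000-4fzyo82mv',
    object: 'chat.completion',
    created: 1718000000,
    model: 'gpt-4o',
    choices: [{
        index: 0,
        message: { role: 'assistant', content: 'Hello!', tool_calls: undefined },
        finish_reason: 'stop',
    }],
    usage: { prompt_tokens: 12, completion_tokens: 3, total_tokens: 15 }, // illustrative counts
};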
@@ -85,39 +85,39 @@ function convertCopilotResponseToOpenAI(copilotResponse, model) {
  }
  function mapFinishReason(copilotReason) {
  switch (copilotReason) {
- case "stop":
- case "end_turn":
- return "stop";
- case "max_tokens":
- case "length":
- return "length";
- case "tool_calls":
- case "function_call":
- return "tool_calls";
- case "content_filter":
- case "safety":
- return "content_filter";
+ case 'stop':
+ case 'end_turn':
+ return 'stop';
+ case 'max_tokens':
+ case 'length':
+ return 'length';
+ case 'tool_calls':
+ case 'function_call':
+ return 'tool_calls';
+ case 'content_filter':
+ case 'safety':
+ return 'content_filter';
  default:
- return "stop";
+ return 'stop';
  }
  }
  function parseOpenAIRequest(context, itemIndex) {
- const model = context.getNodeParameter("model", itemIndex, "gpt-4o");
- const messagesParam = context.getNodeParameter("messages", itemIndex, {
+ const model = context.getNodeParameter('model', itemIndex, 'gpt-4o');
+ const messagesParam = context.getNodeParameter('messages', itemIndex, {
  message: [],
  });
- const tools = context.getNodeParameter("tools", itemIndex, "");
- const toolChoice = context.getNodeParameter("tool_choice", itemIndex, "auto");
- const responseFormat = context.getNodeParameter("response_format", itemIndex, "text");
- const temperature = context.getNodeParameter("temperature", itemIndex, 1);
- const maxTokens = context.getNodeParameter("max_tokens", itemIndex, "");
- const topP = context.getNodeParameter("top_p", itemIndex, 1);
- const frequencyPenalty = context.getNodeParameter("frequency_penalty", itemIndex, 0);
- const presencePenalty = context.getNodeParameter("presence_penalty", itemIndex, 0);
- const stop = context.getNodeParameter("stop", itemIndex, "");
- const stream = context.getNodeParameter("stream", itemIndex, false);
- const seed = context.getNodeParameter("seed", itemIndex, "");
- const user = context.getNodeParameter("user", itemIndex, "");
+ const tools = context.getNodeParameter('tools', itemIndex, '');
+ const toolChoice = context.getNodeParameter('tool_choice', itemIndex, 'auto');
+ const responseFormat = context.getNodeParameter('response_format', itemIndex, 'text');
+ const temperature = context.getNodeParameter('temperature', itemIndex, 1);
+ const maxTokens = context.getNodeParameter('max_tokens', itemIndex, '');
+ const topP = context.getNodeParameter('top_p', itemIndex, 1);
+ const frequencyPenalty = context.getNodeParameter('frequency_penalty', itemIndex, 0);
+ const presencePenalty = context.getNodeParameter('presence_penalty', itemIndex, 0);
+ const stop = context.getNodeParameter('stop', itemIndex, '');
+ const stream = context.getNodeParameter('stream', itemIndex, false);
+ const seed = context.getNodeParameter('seed', itemIndex, '');
+ const user = context.getNodeParameter('user', itemIndex, '');
  const messages = [];
  if (messagesParam.message && Array.isArray(messagesParam.message)) {
  for (const msg of messagesParam.message) {
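
mapFinishReason, shown at the top of the hunk above, collapses Copilot's stop reasons onto the four finish_reason values OpenAI clients understand, defaulting to 'stop'. The same mapping expressed as a lookup table (a condensed rewrite for readability, not the package's switch statement):

// Equivalent lookup table for the switch shown above (assumed simplification).
const finishReasonMap: Record<string, 'stop' | 'length' | 'tool_calls' | 'content_filter'> = {
    stop: 'stop', end_turn: 'stop',
    max_tokens: 'length', length: 'length',
    tool_calls: 'tool_calls', function_call: 'tool_calls',
    content_filter: 'content_filter', safety: 'content_filter',
};

function mapFinishReason(copilotReason: string) {
    return finishReasonMap[copilotReason] ?? 'stop'; // default branch returns 'stop'
}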
@@ -142,10 +142,10 @@ function parseOpenAIRequest(context, itemIndex) {
  request.tool_choice = toolChoice;
  }
  catch (error) {
- throw new Error(`Invalid tools JSON: ${error instanceof Error ? error.message : "Unknown error"}`);
+ throw new Error(`Invalid tools JSON: ${error instanceof Error ? error.message : 'Unknown error'}`);
  }
  }
- if (responseFormat !== "text") {
+ if (responseFormat !== 'text') {
  request.response_format = { type: responseFormat };
  }
  if (maxTokens) {
@@ -168,8 +168,8 @@ function parseOpenAIRequest(context, itemIndex) {
  return request;
  }
  function debugLog(context, itemIndex, message, data) {
- const advancedOptions = context.getNodeParameter("advancedOptions", itemIndex, {});
+ const advancedOptions = context.getNodeParameter('advancedOptions', itemIndex, {});
  if (advancedOptions.debugMode) {
- console.log(`[GitHub Copilot OpenAI Debug] ${message}`, data ? JSON.stringify(data, null, 2) : "");
+ console.log(`[GitHub Copilot OpenAI Debug] ${message}`, data ? JSON.stringify(data, null, 2) : '');
  }
  }
package/dist/nodes/GitHubCopilotOpenAI/utils/types.d.ts

@@ -1,8 +1,8 @@
- import { IDataObject, IExecuteFunctions } from "n8n-workflow";
- import { CopilotResponse } from "../../../shared/utils/GitHubCopilotApiUtils";
+ import { IDataObject, IExecuteFunctions } from 'n8n-workflow';
+ import { CopilotResponse } from '../../../shared/utils/GitHubCopilotApiUtils';
  export { CopilotResponse };
  export interface OpenAIMessage {
- role: "system" | "user" | "assistant" | "tool";
+ role: 'system' | 'user' | 'assistant' | 'tool';
  content: string;
  name?: string;
  tool_calls?: ToolCall[];
@@ -10,14 +10,14 @@ export interface OpenAIMessage {
  }
  export interface ToolCall {
  id: string;
- type: "function";
+ type: 'function';
  function: {
  name: string;
  arguments: string;
  };
  }
  export interface OpenAITool {
- type: "function";
+ type: 'function';
  function: {
  name: string;
  description: string;
@@ -28,14 +28,14 @@ export interface OpenAIRequest {
  model: string;
  messages: OpenAIMessage[];
  tools?: OpenAITool[];
- tool_choice?: "auto" | "none" | "required" | {
- type: "function";
+ tool_choice?: 'auto' | 'none' | 'required' | {
+ type: 'function';
  function: {
  name: string;
  };
  };
  response_format?: {
- type: "text" | "json_object";
+ type: 'text' | 'json_object';
  };
  temperature?: number;
  max_tokens?: number;
@@ -49,17 +49,17 @@ export interface OpenAIRequest {
  }
  export interface OpenAIResponse {
  id: string;
- object: "chat.completion";
+ object: 'chat.completion';
  created: number;
  model: string;
  choices: Array<{
  index: number;
  message: {
- role: "assistant";
+ role: 'assistant';
  content: string | null;
  tool_calls?: ToolCall[];
  };
- finish_reason: "stop" | "length" | "tool_calls" | "content_filter";
+ finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter';
  }>;
  usage: {
  prompt_tokens: number;
@@ -70,7 +70,7 @@ export interface OpenAIResponse {
  export interface FileProcessOptions {
  context: IExecuteFunctions;
  itemIndex: number;
- source: "manual" | "url" | "binary";
+ source: 'manual' | 'url' | 'binary';
  filePath?: string;
  url?: string;
  binaryProperty?: string;
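
For orientation, here is a minimal object satisfying the OpenAIRequest interface declared in this file (the type is copied structurally rather than imported from the package; values are illustrative and all optional fields are omitted):

type Role = 'system' | 'user' | 'assistant' | 'tool';

// Only the two required OpenAIRequest fields: model and messages.
const request: { model: string; messages: { role: Role; content: string }[] } = {
    model: 'gpt-4o',
    messages: [
        { role: 'system', content: 'You are a helpful assistant.' },
        { role: 'user', content: 'Ping?' },
    ],
};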
package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.d.ts

@@ -1,4 +1,4 @@
- import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription } from "n8n-workflow";
+ import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription } from 'n8n-workflow';
  export declare class GitHubCopilotTest implements INodeType {
  description: INodeTypeDescription;
  execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;