n8n-nodes-github-copilot 3.3.0 → 3.5.0

This diff reflects the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (37)
  1. package/dist/credentials/GitHubApi.credentials.d.ts +8 -8
  2. package/dist/credentials/GitHubApi.credentials.js +50 -50
  3. package/dist/credentials/GitHubApiManual.credentials.d.ts +7 -7
  4. package/dist/credentials/GitHubApiManual.credentials.js +33 -33
  5. package/dist/nodes/GitHubCopilot/GitHubCopilot.node.d.ts +5 -5
  6. package/dist/nodes/GitHubCopilot/GitHubCopilot.node.js +324 -324
  7. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.d.ts +5 -5
  8. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.js +141 -146
  9. package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.d.ts +2 -2
  10. package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.js +172 -202
  11. package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.d.ts +19 -21
  12. package/dist/nodes/GitHubCopilotChatAPI/utils/helpers.js +130 -131
  13. package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.d.ts +8 -8
  14. package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.js +100 -101
  15. package/dist/nodes/GitHubCopilotChatAPI/utils/index.d.ts +3 -3
  16. package/dist/nodes/GitHubCopilotChatAPI/utils/index.js +19 -19
  17. package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.d.ts +14 -14
  18. package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.js +70 -71
  19. package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.d.ts +5 -5
  20. package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.js +113 -113
  21. package/dist/nodes/GitHubCopilotChatAPI/utils/types.d.ts +57 -57
  22. package/dist/nodes/GitHubCopilotChatAPI/utils/types.js +2 -2
  23. package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.d.ts +5 -0
  24. package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.js +140 -0
  25. package/dist/nodes/GitHubCopilotChatModel/copilot.svg +34 -0
  26. package/dist/shared/models/GitHubCopilotModels.d.ts +43 -0
  27. package/dist/shared/models/GitHubCopilotModels.js +218 -0
  28. package/package.json +7 -6
  29. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.backup.d.ts +0 -5
  30. package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.backup.js +0 -651
  31. package/dist/nodes/GitHubCopilotChatAPI/utils/audioProcessor.d.ts +0 -11
  32. package/dist/nodes/GitHubCopilotChatAPI/utils/audioProcessor.js +0 -86
  33. package/dist/nodes/N8nAiAgent/N8nAiAgent.node.d.ts +0 -5
  34. package/dist/nodes/N8nAiAgent/N8nAiAgent.node.js +0 -214
  35. package/dist/nodes/N8nAiAgent/n8n-ai.svg +0 -35
  36. package/dist/nodes/N8nAiAgent/nodeProperties.d.ts +0 -2
  37. package/dist/nodes/N8nAiAgent/nodeProperties.js +0 -432
package/dist/nodes/GitHubCopilotChatAPI/utils/audioProcessor.js
@@ -1,86 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.createAudioSummary = exports.optimizeAudioForTokens = exports.chunkAudioData = exports.processAudioFile = void 0;
- const helpers_1 = require("./helpers");
- async function processAudioFile(context, itemIndex, audioSource, audioFile, audioUrl, audioProperty) {
-     var _a, _b;
-     let audioBuffer;
-     let filename;
-     switch (audioSource) {
-         case 'file':
-             if (!audioFile) {
-                 throw new Error('Audio file content is required when source is "file"');
-             }
-             audioBuffer = Buffer.from(audioFile, 'base64');
-             filename = 'uploaded_audio.mp3';
-             break;
-         case 'url':
-             if (!audioUrl) {
-                 throw new Error('Audio URL is required when source is "url"');
-             }
-             audioBuffer = await (0, helpers_1.downloadFileFromUrl)(audioUrl);
-             filename = audioUrl.split('/').pop() || 'downloaded_audio.mp3';
-             break;
-         case 'binary':
-             if (!audioProperty) {
-                 throw new Error('Audio property name is required when source is "binary"');
-             }
-             audioBuffer = await (0, helpers_1.getFileFromBinary)(context, itemIndex, audioProperty);
-             const items = context.getInputData();
-             const item = items[itemIndex];
-             filename = ((_b = (_a = item.binary) === null || _a === void 0 ? void 0 : _a[audioProperty]) === null || _b === void 0 ? void 0 : _b.fileName) || 'binary_audio.mp3';
-             break;
-         default:
-             throw new Error(`Invalid audio source: ${audioSource}`);
-     }
-     (0, helpers_1.validateFileSize)(audioBuffer, 25600);
-     const base64Audio = audioBuffer.toString('base64');
-     const estimatedTokens = (0, helpers_1.estimateTokens)(base64Audio);
-     if (estimatedTokens > 100000) {
-         throw new Error(`Audio file too large (${estimatedTokens} estimated tokens). Consider using a smaller file.`);
-     }
-     const mimeType = (0, helpers_1.getAudioMimeType)(filename);
-     return {
-         data: base64Audio,
-         mimeType,
-         filename,
-         size: audioBuffer.length,
-         estimatedTokens
-     };
- }
- exports.processAudioFile = processAudioFile;
- function chunkAudioData(base64Data, maxChunkSize = 50000) {
-     const chunks = [];
-     for (let i = 0; i < base64Data.length; i += maxChunkSize) {
-         chunks.push(base64Data.slice(i, i + maxChunkSize));
-     }
-     return chunks;
- }
- exports.chunkAudioData = chunkAudioData;
- function optimizeAudioForTokens(base64Data, maxTokens = 100000) {
-     const originalTokens = (0, helpers_1.estimateTokens)(base64Data);
-     if (originalTokens <= maxTokens) {
-         return {
-             data: base64Data,
-             truncated: false,
-             originalTokens,
-             finalTokens: originalTokens
-         };
-     }
-     const compressionRatio = maxTokens / originalTokens;
-     const targetLength = Math.floor(base64Data.length * compressionRatio);
-     const compressedData = base64Data.slice(0, Math.max(targetLength, 1000));
-     return {
-         data: compressedData,
-         truncated: true,
-         originalTokens,
-         finalTokens: (0, helpers_1.estimateTokens)(compressedData)
-     };
- }
- exports.optimizeAudioForTokens = optimizeAudioForTokens;
- function createAudioSummary(filename, size, duration) {
-     const sizeKB = Math.round(size / 1024);
-     const durationText = duration ? ` (${Math.round(duration)}s)` : '';
-     return `Audio file: ${filename} - ${sizeKB}KB${durationText}. File too large for direct processing, providing description instead.`;
- }
- exports.createAudioSummary = createAudioSummary;
package/dist/nodes/N8nAiAgent/N8nAiAgent.node.d.ts
@@ -1,5 +0,0 @@
- import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription } from 'n8n-workflow';
- export declare class N8nAiAgent implements INodeType {
-     description: INodeTypeDescription;
-     execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
- }
package/dist/nodes/N8nAiAgent/N8nAiAgent.node.js
@@ -1,214 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.N8nAiAgent = void 0;
- const n8n_workflow_1 = require("n8n-workflow");
- const nodeProperties_1 = require("./nodeProperties");
- const mediaDetection_1 = require("../GitHubCopilotChatAPI/utils/mediaDetection");
- class N8nAiAgent {
-     constructor() {
-         this.description = {
-             displayName: 'N8N AI Agent',
-             name: 'n8nAiAgent',
-             icon: 'file:n8n-ai.svg',
-             group: ['AI'],
-             version: 1,
-             subtitle: '={{$parameter["operation"] + ": " + $parameter["model"]}}',
-             description: 'Connect to N8N AI Agent service for advanced AI capabilities with tool calling and memory',
-             defaults: {
-                 name: 'N8N AI Agent',
-             },
-             inputs: ["main"],
-             outputs: ["main"],
-             credentials: [
-                 {
-                     name: 'n8nApi',
-                     required: true,
-                 },
-             ],
-             properties: nodeProperties_1.nodeProperties,
-         };
-     }
-     async execute() {
-         const items = this.getInputData();
-         const returnData = [];
-         for (let i = 0; i < items.length; i++) {
-             try {
-                 const operation = this.getNodeParameter('operation', i);
-                 const model = this.getNodeParameter('model', i);
-                 if (operation === 'chat') {
-                     const message = this.getNodeParameter('message', i);
-                     const includeMedia = this.getNodeParameter('includeMedia', i, false);
-                     const messages = [];
-                     const systemMessage = this.getNodeParameter('systemMessage', i, '');
-                     if (systemMessage) {
-                         messages.push({
-                             role: 'system',
-                             content: systemMessage,
-                         });
-                     }
-                     const messageContent = [
-                         {
-                             type: 'text',
-                             text: message,
-                         },
-                     ];
-                     if (includeMedia) {
-                         const mediaSource = this.getNodeParameter('mediaSource', i);
-                         const processedMedia = await (0, mediaDetection_1.processMediaFile)(this, i, mediaSource, this.getNodeParameter('mediaFile', i, ''), this.getNodeParameter('mediaUrl', i, ''), this.getNodeParameter('mediaProperty', i, ''));
-                         if (processedMedia && processedMedia.dataUrl) {
-                             messageContent.push({
-                                 type: 'image_url',
-                                 image_url: {
-                                     url: processedMedia.dataUrl,
-                                 },
-                             });
-                         }
-                     }
-                     messages.push({
-                         role: 'user',
-                         content: messageContent,
-                     });
-                     const includeHistory = this.getNodeParameter('includeHistory', i, false);
-                     if (includeHistory) {
-                         const historyMessages = this.getNodeParameter('conversationHistory', i, []);
-                         messages.splice(-1, 0, ...historyMessages);
-                     }
-                     const advancedOptions = {};
-                     const maxTokens = this.getNodeParameter('maxTokens', i, 1000);
-                     const temperature = this.getNodeParameter('temperature', i, 0.7);
-                     const enableTools = this.getNodeParameter('enableTools', i, false);
-                     if (maxTokens > 0) {
-                         advancedOptions.max_tokens = maxTokens;
-                     }
-                     advancedOptions.temperature = temperature;
-                     if (enableTools) {
-                         const toolsConfig = this.getNodeParameter('toolsConfig', i, {});
-                         if (toolsConfig.tools && Array.isArray(toolsConfig.tools)) {
-                             advancedOptions.tools = toolsConfig.tools;
-                         }
-                     }
-                     const requestBody = {
-                         model,
-                         messages,
-                         stream: false,
-                         ...advancedOptions,
-                     };
-                     const response = await makeN8nAiAgentRequest(this, '/chat', requestBody, includeMedia);
-                     const result = {
-                         message: response.response || response.message || '',
-                         model,
-                         operation,
-                         usage: response.usage || null,
-                         tool_calls: response.tool_calls || null,
-                         memory: response.memory || null,
-                         finish_reason: response.finish_reason || 'completed',
-                     };
-                     returnData.push({
-                         json: result,
-                         pairedItem: { item: i },
-                     });
-                 }
-                 else if (operation === 'tools') {
-                     const toolName = this.getNodeParameter('toolName', i);
-                     const toolArguments = this.getNodeParameter('toolArguments', i, {});
-                     const context = this.getNodeParameter('context', i, '');
-                     const requestBody = {
-                         tool: toolName,
-                         arguments: toolArguments,
-                         context,
-                         model,
-                     };
-                     const response = await makeN8nAiAgentRequest(this, '/tools', requestBody, false);
-                     const result = {
-                         tool_name: toolName,
-                         result: response.result || response.response,
-                         execution_time: response.execution_time || null,
-                         operation,
-                         model,
-                     };
-                     returnData.push({
-                         json: result,
-                         pairedItem: { item: i },
-                     });
-                 }
-                 else if (operation === 'memory') {
-                     const memoryAction = this.getNodeParameter('memoryAction', i);
-                     const sessionId = this.getNodeParameter('sessionId', i, '');
-                     const requestBody = {
-                         action: memoryAction,
-                         session_id: sessionId,
-                     };
-                     if (memoryAction === 'store') {
-                         const memoryData = this.getNodeParameter('memoryData', i);
-                         requestBody.data = memoryData;
-                     }
-                     const response = await makeN8nAiAgentRequest(this, '/memory', requestBody, false);
-                     const result = {
-                         action: memoryAction,
-                         session_id: sessionId,
-                         data: response.data || response.memory,
-                         operation,
-                     };
-                     returnData.push({
-                         json: result,
-                         pairedItem: { item: i },
-                     });
-                 }
-             }
-             catch (error) {
-                 if (this.continueOnFail()) {
-                     const errorMessage = error instanceof Error ? error.message : 'Unknown error';
-                     returnData.push({
-                         json: {
-                             error: errorMessage,
-                             operation: this.getNodeParameter('operation', i),
-                             model: this.getNodeParameter('model', i),
-                         },
-                         pairedItem: { item: i },
-                     });
-                 }
-                 else {
-                     throw error;
-                 }
-             }
-         }
-         return [returnData];
-     }
- }
- exports.N8nAiAgent = N8nAiAgent;
- async function makeN8nAiAgentRequest(context, endpoint, requestBody, hasMedia) {
-     const credentials = await context.getCredentials('n8nApi');
-     const baseUrl = credentials.baseUrl || 'http://localhost:5678';
-     const apiKey = credentials.apiKey;
-     if (!apiKey) {
-         throw new n8n_workflow_1.NodeOperationError(context.getNode(), 'N8N API key is required');
-     }
-     const options = {
-         method: 'POST',
-         headers: {
-             'Content-Type': 'application/json',
-             'Authorization': `Bearer ${apiKey}`,
-             'User-Agent': 'N8nAiAgentNode/1.0',
-             ...(hasMedia && { 'AI-Vision-Request': 'true' }),
-         },
-         body: JSON.stringify(requestBody),
-         uri: `${baseUrl}/api/v1/ai-agent${endpoint}`,
-         json: true,
-     };
-     try {
-         const response = await context.helpers.request(options);
-         return response;
-     }
-     catch (error) {
-         const apiError = error;
-         if (apiError.statusCode === 401) {
-             throw new n8n_workflow_1.NodeOperationError(context.getNode(), 'Invalid N8N API key');
-         }
-         else if (apiError.statusCode === 404) {
-             throw new n8n_workflow_1.NodeOperationError(context.getNode(), 'N8N AI Agent endpoint not found. Make sure AI Agent is enabled in your N8N instance.');
-         }
-         else {
-             throw new n8n_workflow_1.NodeOperationError(context.getNode(), `N8N AI Agent API error: ${apiError.message || String(error)}`);
-         }
-     }
- }
package/dist/nodes/N8nAiAgent/n8n-ai.svg
@@ -1,35 +0,0 @@
- <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100" width="100" height="100">
-   <!-- Background circle -->
-   <circle cx="50" cy="50" r="45" fill="#ff6d5a" stroke="#e55a4a" stroke-width="2"/>
-
-   <!-- N8N Logo style -->
-   <g transform="translate(50,50)" fill="white">
-     <!-- Brain/Neural network pattern -->
-     <circle cx="-15" cy="-15" r="3" opacity="0.9"/>
-     <circle cx="0" cy="-20" r="3" opacity="0.9"/>
-     <circle cx="15" cy="-15" r="3" opacity="0.9"/>
-     <circle cx="-20" cy="0" r="3" opacity="0.9"/>
-     <circle cx="0" cy="0" r="4" opacity="1"/>
-     <circle cx="20" cy="0" r="3" opacity="0.9"/>
-     <circle cx="-15" cy="15" r="3" opacity="0.9"/>
-     <circle cx="0" cy="20" r="3" opacity="0.9"/>
-     <circle cx="15" cy="15" r="3" opacity="0.9"/>
-
-     <!-- Connections -->
-     <line x1="-15" y1="-15" x2="0" y2="-20" stroke="white" stroke-width="1" opacity="0.6"/>
-     <line x1="0" y1="-20" x2="15" y2="-15" stroke="white" stroke-width="1" opacity="0.6"/>
-     <line x1="-20" y1="0" x2="-15" y2="-15" stroke="white" stroke-width="1" opacity="0.6"/>
-     <line x1="-20" y1="0" x2="0" y2="0" stroke="white" stroke-width="2" opacity="0.8"/>
-     <line x1="0" y1="0" x2="20" y2="0" stroke="white" stroke-width="2" opacity="0.8"/>
-     <line x1="20" y1="0" x2="15" y2="15" stroke="white" stroke-width="1" opacity="0.6"/>
-     <line x1="0" y1="0" x2="0" y2="20" stroke="white" stroke-width="2" opacity="0.8"/>
-     <line x1="-15" y1="15" x2="0" y2="20" stroke="white" stroke-width="1" opacity="0.6"/>
-     <line x1="0" y1="20" x2="15" y2="15" stroke="white" stroke-width="1" opacity="0.6"/>
-
-     <!-- AI indicator -->
-     <text x="0" y="35" text-anchor="middle" font-family="Arial, sans-serif" font-size="12" font-weight="bold" fill="white">AI</text>
-   </g>
-
-   <!-- N8N text at bottom -->
-   <text x="50" y="85" text-anchor="middle" font-family="Arial, sans-serif" font-size="10" font-weight="bold" fill="white">N8N</text>
- </svg>
package/dist/nodes/N8nAiAgent/nodeProperties.d.ts
@@ -1,2 +0,0 @@
- import { INodeProperties } from 'n8n-workflow';
- export declare const nodeProperties: INodeProperties[];