n8n-nodes-github-copilot 3.27.5 → 3.28.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/credentials/GitHubCopilotApi.credentials.d.ts +1 -1
- package/dist/credentials/GitHubCopilotApi.credentials.js +19 -19
- package/dist/credentials/GitHubCopilotOAuth2Api.credentials.backup.d.ts +1 -1
- package/dist/credentials/GitHubCopilotOAuth2Api.credentials.backup.js +71 -71
- package/dist/credentials/GitHubCopilotOAuth2Api.credentials.d.ts +1 -1
- package/dist/credentials/GitHubCopilotOAuth2Api.credentials.js +67 -67
- package/dist/credentials/GitHubCopilotOAuth2Api.credentials.oauth.d.ts +1 -9
- package/dist/credentials/GitHubCopilotOAuth2Api.credentials.oauth.js +38 -63
- package/dist/nodes/GitHubCopilot/GitHubCopilot.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilot/GitHubCopilot.node.js +188 -181
- package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilotChatAPI/GitHubCopilotChatAPI.node.js +38 -38
- package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.d.ts +1 -1
- package/dist/nodes/GitHubCopilotChatAPI/nodeProperties.js +97 -97
- package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.d.ts +2 -2
- package/dist/nodes/GitHubCopilotChatAPI/utils/imageProcessor.js +16 -15
- package/dist/nodes/GitHubCopilotChatAPI/utils/index.d.ts +3 -3
- package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.d.ts +3 -3
- package/dist/nodes/GitHubCopilotChatAPI/utils/mediaDetection.js +20 -26
- package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.d.ts +1 -1
- package/dist/nodes/GitHubCopilotChatAPI/utils/modelCapabilities.js +24 -24
- package/dist/nodes/GitHubCopilotChatAPI/utils/types.d.ts +4 -4
- package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilotChatModel/GitHubCopilotChatModel.node.js +86 -82
- package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.d.ts +5 -0
- package/dist/nodes/GitHubCopilotOpenAI/GitHubCopilotOpenAI.node.js +142 -0
- package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.d.ts +2 -0
- package/dist/nodes/GitHubCopilotOpenAI/nodeProperties.js +326 -0
- package/dist/nodes/GitHubCopilotOpenAI/utils/index.d.ts +2 -0
- package/dist/nodes/GitHubCopilotOpenAI/utils/index.js +24 -0
- package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.d.ts +95 -0
- package/dist/nodes/GitHubCopilotOpenAI/utils/openaiCompat.js +175 -0
- package/dist/nodes/GitHubCopilotOpenAI/utils/types.d.ts +101 -0
- package/dist/nodes/GitHubCopilotOpenAI/utils/types.js +2 -0
- package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.d.ts +1 -1
- package/dist/nodes/GitHubCopilotTest/GitHubCopilotTest.node.js +96 -94
- package/package.json +75 -74
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.GitHubCopilotOpenAI = void 0;
|
|
4
|
+
const nodeProperties_1 = require("./nodeProperties");
|
|
5
|
+
class GitHubCopilotOpenAI {
|
|
6
|
+
constructor() {
|
|
7
|
+
this.description = {
|
|
8
|
+
displayName: "GitHub Copilot OpenAI",
|
|
9
|
+
name: "gitHubCopilotOpenAI",
|
|
10
|
+
icon: "file:../../shared/icons/copilot.svg",
|
|
11
|
+
group: ["AI"],
|
|
12
|
+
version: 1,
|
|
13
|
+
subtitle: "={{$parameter[\"operation\"] + \": \" + $parameter[\"model\"]}}",
|
|
14
|
+
description: "OpenAI-compatible GitHub Copilot Chat API with full support for messages, tools, and all OpenAI parameters",
|
|
15
|
+
defaults: {
|
|
16
|
+
name: "GitHub Copilot OpenAI",
|
|
17
|
+
},
|
|
18
|
+
inputs: ["main"],
|
|
19
|
+
outputs: ["main"],
|
|
20
|
+
credentials: [
|
|
21
|
+
{
|
|
22
|
+
name: "githubCopilotApi",
|
|
23
|
+
required: true,
|
|
24
|
+
displayOptions: {
|
|
25
|
+
show: {
|
|
26
|
+
credentialType: ["githubCopilotApi"],
|
|
27
|
+
},
|
|
28
|
+
},
|
|
29
|
+
},
|
|
30
|
+
{
|
|
31
|
+
name: "githubCopilotOAuth2Api",
|
|
32
|
+
required: true,
|
|
33
|
+
displayOptions: {
|
|
34
|
+
show: {
|
|
35
|
+
credentialType: ["githubCopilotOAuth2Api"],
|
|
36
|
+
},
|
|
37
|
+
},
|
|
38
|
+
},
|
|
39
|
+
],
|
|
40
|
+
properties: nodeProperties_1.nodeProperties,
|
|
41
|
+
};
|
|
42
|
+
}
|
|
43
|
+
async execute() {
|
|
44
|
+
const items = this.getInputData();
|
|
45
|
+
const returnData = [];
|
|
46
|
+
for (let i = 0; i < items.length; i++) {
|
|
47
|
+
try {
|
|
48
|
+
const operation = this.getNodeParameter("operation", i);
|
|
49
|
+
if (operation === "chat") {
|
|
50
|
+
const model = this.getNodeParameter("model", i, "gpt-4o");
|
|
51
|
+
const messagesParam = this.getNodeParameter("messages", i, {
|
|
52
|
+
message: [],
|
|
53
|
+
});
|
|
54
|
+
const temperature = this.getNodeParameter("temperature", i, 1);
|
|
55
|
+
const tools = this.getNodeParameter("tools", i, "");
|
|
56
|
+
const messages = [];
|
|
57
|
+
if (messagesParam.message && Array.isArray(messagesParam.message)) {
|
|
58
|
+
for (const msg of messagesParam.message) {
|
|
59
|
+
messages.push({
|
|
60
|
+
role: msg.role,
|
|
61
|
+
content: msg.content,
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
if (messages.length === 0) {
|
|
66
|
+
messages.push({
|
|
67
|
+
role: "user",
|
|
68
|
+
content: "Hello! How can you help me?",
|
|
69
|
+
});
|
|
70
|
+
}
|
|
71
|
+
const modelMapping = {
|
|
72
|
+
"gpt-4": "gpt-4o",
|
|
73
|
+
"gpt-4o": "gpt-4o",
|
|
74
|
+
"gpt-4o-mini": "gpt-4o-mini",
|
|
75
|
+
"claude-3-5-sonnet": "claude-3.5-sonnet",
|
|
76
|
+
};
|
|
77
|
+
const copilotModel = modelMapping[model] || "gpt-4o";
|
|
78
|
+
const mockResponse = {
|
|
79
|
+
id: `chatcmpl-${Date.now()}`,
|
|
80
|
+
object: "chat.completion",
|
|
81
|
+
created: Math.floor(Date.now() / 1000),
|
|
82
|
+
model: model,
|
|
83
|
+
choices: [
|
|
84
|
+
{
|
|
85
|
+
index: 0,
|
|
86
|
+
message: {
|
|
87
|
+
role: "assistant",
|
|
88
|
+
content: `🚀 GitHub Copilot OpenAI Mock Response
|
|
89
|
+
|
|
90
|
+
**Request Details:**
|
|
91
|
+
- Model: ${model} → ${copilotModel}
|
|
92
|
+
- Messages: ${messages.length}
|
|
93
|
+
- Temperature: ${temperature}
|
|
94
|
+
- Tools: ${tools ? "Yes" : "No"}
|
|
95
|
+
|
|
96
|
+
**Sample Messages:**
|
|
97
|
+
${messages
|
|
98
|
+
.map((msg, idx) => `${idx + 1}. [${msg.role}]: ${msg.content.substring(0, 100)}${msg.content.length > 100 ? "..." : ""}`)
|
|
99
|
+
.join("\n")}
|
|
100
|
+
|
|
101
|
+
This is a mock response. The real GitHub Copilot integration will be implemented next.`,
|
|
102
|
+
},
|
|
103
|
+
finish_reason: "stop",
|
|
104
|
+
},
|
|
105
|
+
],
|
|
106
|
+
usage: {
|
|
107
|
+
prompt_tokens: 50,
|
|
108
|
+
completion_tokens: 125,
|
|
109
|
+
total_tokens: 175,
|
|
110
|
+
},
|
|
111
|
+
};
|
|
112
|
+
returnData.push({
|
|
113
|
+
json: mockResponse,
|
|
114
|
+
pairedItem: { item: i },
|
|
115
|
+
});
|
|
116
|
+
}
|
|
117
|
+
else {
|
|
118
|
+
throw new Error(`Unknown operation: ${operation}`);
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
catch (error) {
|
|
122
|
+
if (this.continueOnFail()) {
|
|
123
|
+
returnData.push({
|
|
124
|
+
json: {
|
|
125
|
+
error: {
|
|
126
|
+
message: error instanceof Error ? error.message : "Unknown error",
|
|
127
|
+
type: "api_error",
|
|
128
|
+
code: "github_copilot_openai_error",
|
|
129
|
+
},
|
|
130
|
+
},
|
|
131
|
+
pairedItem: { item: i },
|
|
132
|
+
});
|
|
133
|
+
}
|
|
134
|
+
else {
|
|
135
|
+
throw error;
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
return [returnData];
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
exports.GitHubCopilotOpenAI = GitHubCopilotOpenAI;
|
|
@@ -0,0 +1,326 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.nodeProperties = void 0;
|
|
4
|
+
exports.nodeProperties = [
|
|
5
|
+
{
|
|
6
|
+
displayName: "Credential Type",
|
|
7
|
+
name: "credentialType",
|
|
8
|
+
type: "options",
|
|
9
|
+
options: [
|
|
10
|
+
{
|
|
11
|
+
name: "GitHub Copilot API (Manual Token)",
|
|
12
|
+
value: "githubCopilotApi",
|
|
13
|
+
description: "Use manual GitHub CLI token",
|
|
14
|
+
},
|
|
15
|
+
{
|
|
16
|
+
name: "GitHub Copilot OAuth2 (with Helper)",
|
|
17
|
+
value: "githubCopilotOAuth2Api",
|
|
18
|
+
description: "Use OAuth2 credential with helper script",
|
|
19
|
+
},
|
|
20
|
+
],
|
|
21
|
+
default: "githubCopilotApi",
|
|
22
|
+
description: "Type of credential to use for GitHub Copilot authentication",
|
|
23
|
+
},
|
|
24
|
+
{
|
|
25
|
+
displayName: "Operation",
|
|
26
|
+
name: "operation",
|
|
27
|
+
type: "options",
|
|
28
|
+
noDataExpression: true,
|
|
29
|
+
options: [
|
|
30
|
+
{
|
|
31
|
+
name: "Chat Completion",
|
|
32
|
+
value: "chat",
|
|
33
|
+
description: "Send messages to GitHub Copilot Chat API with full OpenAI compatibility",
|
|
34
|
+
},
|
|
35
|
+
],
|
|
36
|
+
default: "chat",
|
|
37
|
+
},
|
|
38
|
+
{
|
|
39
|
+
displayName: "Model",
|
|
40
|
+
name: "model",
|
|
41
|
+
type: "string",
|
|
42
|
+
default: "gpt-4o",
|
|
43
|
+
placeholder: "gpt-4o",
|
|
44
|
+
description: "The model to use for the completion. Supports all OpenAI model names that map to GitHub Copilot models.",
|
|
45
|
+
},
|
|
46
|
+
{
|
|
47
|
+
displayName: "Messages",
|
|
48
|
+
name: "messages",
|
|
49
|
+
type: "fixedCollection",
|
|
50
|
+
typeOptions: {
|
|
51
|
+
multipleValues: true,
|
|
52
|
+
sortable: true,
|
|
53
|
+
},
|
|
54
|
+
default: {
|
|
55
|
+
message: [
|
|
56
|
+
{
|
|
57
|
+
role: "user",
|
|
58
|
+
content: "",
|
|
59
|
+
},
|
|
60
|
+
],
|
|
61
|
+
},
|
|
62
|
+
options: [
|
|
63
|
+
{
|
|
64
|
+
name: "message",
|
|
65
|
+
displayName: "Message",
|
|
66
|
+
values: [
|
|
67
|
+
{
|
|
68
|
+
displayName: "Role",
|
|
69
|
+
name: "role",
|
|
70
|
+
type: "options",
|
|
71
|
+
options: [
|
|
72
|
+
{
|
|
73
|
+
name: "System",
|
|
74
|
+
value: "system",
|
|
75
|
+
description: "System message to set the behavior of the AI",
|
|
76
|
+
},
|
|
77
|
+
{
|
|
78
|
+
name: "User",
|
|
79
|
+
value: "user",
|
|
80
|
+
description: "Message from the user",
|
|
81
|
+
},
|
|
82
|
+
{
|
|
83
|
+
name: "Assistant",
|
|
84
|
+
value: "assistant",
|
|
85
|
+
description: "Previous response from the AI assistant",
|
|
86
|
+
},
|
|
87
|
+
],
|
|
88
|
+
default: "user",
|
|
89
|
+
},
|
|
90
|
+
{
|
|
91
|
+
displayName: "Content",
|
|
92
|
+
name: "content",
|
|
93
|
+
type: "string",
|
|
94
|
+
typeOptions: {
|
|
95
|
+
rows: 3,
|
|
96
|
+
},
|
|
97
|
+
default: "",
|
|
98
|
+
placeholder: "Enter message content...",
|
|
99
|
+
description: "The content of the message",
|
|
100
|
+
},
|
|
101
|
+
],
|
|
102
|
+
},
|
|
103
|
+
],
|
|
104
|
+
description: "Array of messages for the conversation",
|
|
105
|
+
},
|
|
106
|
+
{
|
|
107
|
+
displayName: "Tools",
|
|
108
|
+
name: "tools",
|
|
109
|
+
type: "json",
|
|
110
|
+
default: "",
|
|
111
|
+
placeholder: `[
|
|
112
|
+
{
|
|
113
|
+
"type": "function",
|
|
114
|
+
"function": {
|
|
115
|
+
"name": "get_weather",
|
|
116
|
+
"description": "Get current weather",
|
|
117
|
+
"parameters": {
|
|
118
|
+
"type": "object",
|
|
119
|
+
"properties": {
|
|
120
|
+
"location": {
|
|
121
|
+
"type": "string",
|
|
122
|
+
"description": "City name"
|
|
123
|
+
}
|
|
124
|
+
},
|
|
125
|
+
"required": ["location"]
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
]`,
|
|
130
|
+
description: "Array of tools/functions available to the model (OpenAI format)",
|
|
131
|
+
hint: "JSON array of tool definitions in OpenAI format",
|
|
132
|
+
},
|
|
133
|
+
{
|
|
134
|
+
displayName: "Tool Choice",
|
|
135
|
+
name: "tool_choice",
|
|
136
|
+
type: "options",
|
|
137
|
+
options: [
|
|
138
|
+
{
|
|
139
|
+
name: "Auto",
|
|
140
|
+
value: "auto",
|
|
141
|
+
description: "Let the model decide whether to call functions",
|
|
142
|
+
},
|
|
143
|
+
{
|
|
144
|
+
name: "None",
|
|
145
|
+
value: "none",
|
|
146
|
+
description: "Force the model to not call any functions",
|
|
147
|
+
},
|
|
148
|
+
{
|
|
149
|
+
name: "Required",
|
|
150
|
+
value: "required",
|
|
151
|
+
description: "Force the model to call at least one function",
|
|
152
|
+
},
|
|
153
|
+
],
|
|
154
|
+
default: "auto",
|
|
155
|
+
description: "Control how the model uses tools",
|
|
156
|
+
displayOptions: {
|
|
157
|
+
show: {
|
|
158
|
+
tools: ["/.+/"],
|
|
159
|
+
},
|
|
160
|
+
},
|
|
161
|
+
},
|
|
162
|
+
{
|
|
163
|
+
displayName: "Response Format",
|
|
164
|
+
name: "response_format",
|
|
165
|
+
type: "options",
|
|
166
|
+
options: [
|
|
167
|
+
{
|
|
168
|
+
name: "Text",
|
|
169
|
+
value: "text",
|
|
170
|
+
description: "Return response as plain text",
|
|
171
|
+
},
|
|
172
|
+
{
|
|
173
|
+
name: "JSON Object",
|
|
174
|
+
value: "json_object",
|
|
175
|
+
description: "Return response as JSON object",
|
|
176
|
+
},
|
|
177
|
+
],
|
|
178
|
+
default: "text",
|
|
179
|
+
description: "The format of the response",
|
|
180
|
+
},
|
|
181
|
+
{
|
|
182
|
+
displayName: "Temperature",
|
|
183
|
+
name: "temperature",
|
|
184
|
+
type: "number",
|
|
185
|
+
typeOptions: {
|
|
186
|
+
minValue: 0,
|
|
187
|
+
maxValue: 2,
|
|
188
|
+
numberPrecision: 2,
|
|
189
|
+
},
|
|
190
|
+
default: 1,
|
|
191
|
+
description: "Controls randomness in the response. Lower values make responses more focused and deterministic.",
|
|
192
|
+
},
|
|
193
|
+
{
|
|
194
|
+
displayName: "Max Tokens",
|
|
195
|
+
name: "max_tokens",
|
|
196
|
+
type: "number",
|
|
197
|
+
typeOptions: {
|
|
198
|
+
minValue: 1,
|
|
199
|
+
maxValue: 4096,
|
|
200
|
+
},
|
|
201
|
+
default: "",
|
|
202
|
+
placeholder: "1000",
|
|
203
|
+
description: "Maximum number of tokens to generate",
|
|
204
|
+
},
|
|
205
|
+
{
|
|
206
|
+
displayName: "Top P",
|
|
207
|
+
name: "top_p",
|
|
208
|
+
type: "number",
|
|
209
|
+
typeOptions: {
|
|
210
|
+
minValue: 0,
|
|
211
|
+
maxValue: 1,
|
|
212
|
+
numberPrecision: 2,
|
|
213
|
+
},
|
|
214
|
+
default: 1,
|
|
215
|
+
description: "Controls diversity via nucleus sampling",
|
|
216
|
+
},
|
|
217
|
+
{
|
|
218
|
+
displayName: "Frequency Penalty",
|
|
219
|
+
name: "frequency_penalty",
|
|
220
|
+
type: "number",
|
|
221
|
+
typeOptions: {
|
|
222
|
+
minValue: -2,
|
|
223
|
+
maxValue: 2,
|
|
224
|
+
numberPrecision: 2,
|
|
225
|
+
},
|
|
226
|
+
default: 0,
|
|
227
|
+
description: "Penalty for repeated tokens based on their frequency",
|
|
228
|
+
},
|
|
229
|
+
{
|
|
230
|
+
displayName: "Presence Penalty",
|
|
231
|
+
name: "presence_penalty",
|
|
232
|
+
type: "number",
|
|
233
|
+
typeOptions: {
|
|
234
|
+
minValue: -2,
|
|
235
|
+
maxValue: 2,
|
|
236
|
+
numberPrecision: 2,
|
|
237
|
+
},
|
|
238
|
+
default: 0,
|
|
239
|
+
description: "Penalty for repeated tokens based on their presence",
|
|
240
|
+
},
|
|
241
|
+
{
|
|
242
|
+
displayName: "Stop Sequences",
|
|
243
|
+
name: "stop",
|
|
244
|
+
type: "string",
|
|
245
|
+
default: "",
|
|
246
|
+
placeholder: "[\"\\n\", \"Human:\", \"AI:\"]",
|
|
247
|
+
description: "JSON array of strings where the API will stop generating tokens",
|
|
248
|
+
},
|
|
249
|
+
{
|
|
250
|
+
displayName: "Stream",
|
|
251
|
+
name: "stream",
|
|
252
|
+
type: "boolean",
|
|
253
|
+
default: false,
|
|
254
|
+
description: "Whether to stream the response",
|
|
255
|
+
},
|
|
256
|
+
{
|
|
257
|
+
displayName: "Seed",
|
|
258
|
+
name: "seed",
|
|
259
|
+
type: "number",
|
|
260
|
+
default: "",
|
|
261
|
+
placeholder: "12345",
|
|
262
|
+
description: "Seed for deterministic sampling",
|
|
263
|
+
},
|
|
264
|
+
{
|
|
265
|
+
displayName: "User ID",
|
|
266
|
+
name: "user",
|
|
267
|
+
type: "string",
|
|
268
|
+
default: "",
|
|
269
|
+
placeholder: "user-123",
|
|
270
|
+
description: "Unique identifier for the end-user",
|
|
271
|
+
},
|
|
272
|
+
{
|
|
273
|
+
displayName: "Advanced Options",
|
|
274
|
+
name: "advancedOptions",
|
|
275
|
+
type: "collection",
|
|
276
|
+
placeholder: "Add Advanced Option",
|
|
277
|
+
default: {},
|
|
278
|
+
options: [
|
|
279
|
+
{
|
|
280
|
+
displayName: "Enable Retry",
|
|
281
|
+
name: "enableRetry",
|
|
282
|
+
type: "boolean",
|
|
283
|
+
default: true,
|
|
284
|
+
description: "Whether to retry failed requests",
|
|
285
|
+
},
|
|
286
|
+
{
|
|
287
|
+
displayName: "Max Retries",
|
|
288
|
+
name: "maxRetries",
|
|
289
|
+
type: "number",
|
|
290
|
+
default: 3,
|
|
291
|
+
description: "Maximum number of retries for failed requests",
|
|
292
|
+
displayOptions: {
|
|
293
|
+
show: {
|
|
294
|
+
enableRetry: [true],
|
|
295
|
+
},
|
|
296
|
+
},
|
|
297
|
+
},
|
|
298
|
+
{
|
|
299
|
+
displayName: "Retry Delay (ms)",
|
|
300
|
+
name: "retryDelay",
|
|
301
|
+
type: "number",
|
|
302
|
+
default: 1000,
|
|
303
|
+
description: "Delay between retries in milliseconds",
|
|
304
|
+
displayOptions: {
|
|
305
|
+
show: {
|
|
306
|
+
enableRetry: [true],
|
|
307
|
+
},
|
|
308
|
+
},
|
|
309
|
+
},
|
|
310
|
+
{
|
|
311
|
+
displayName: "Request Timeout (ms)",
|
|
312
|
+
name: "timeout",
|
|
313
|
+
type: "number",
|
|
314
|
+
default: 60000,
|
|
315
|
+
description: "Request timeout in milliseconds",
|
|
316
|
+
},
|
|
317
|
+
{
|
|
318
|
+
displayName: "Debug Mode",
|
|
319
|
+
name: "debugMode",
|
|
320
|
+
type: "boolean",
|
|
321
|
+
default: false,
|
|
322
|
+
description: "Enable debug logging",
|
|
323
|
+
},
|
|
324
|
+
],
|
|
325
|
+
},
|
|
326
|
+
];
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
exports.debugLog = exports.parseOpenAIRequest = exports.convertCopilotResponseToOpenAI = exports.convertOpenAIMessagesToCopilot = exports.mapOpenAIModelToCopilot = void 0;
|
|
18
|
+
__exportStar(require("./types"), exports);
|
|
19
|
+
var openaiCompat_1 = require("./openaiCompat");
|
|
20
|
+
Object.defineProperty(exports, "mapOpenAIModelToCopilot", { enumerable: true, get: function () { return openaiCompat_1.mapOpenAIModelToCopilot; } });
|
|
21
|
+
Object.defineProperty(exports, "convertOpenAIMessagesToCopilot", { enumerable: true, get: function () { return openaiCompat_1.convertOpenAIMessagesToCopilot; } });
|
|
22
|
+
Object.defineProperty(exports, "convertCopilotResponseToOpenAI", { enumerable: true, get: function () { return openaiCompat_1.convertCopilotResponseToOpenAI; } });
|
|
23
|
+
Object.defineProperty(exports, "parseOpenAIRequest", { enumerable: true, get: function () { return openaiCompat_1.parseOpenAIRequest; } });
|
|
24
|
+
Object.defineProperty(exports, "debugLog", { enumerable: true, get: function () { return openaiCompat_1.debugLog; } });
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import { IDataObject, IExecuteFunctions } from "n8n-workflow";
|
|
2
|
+
export interface OpenAIMessage {
|
|
3
|
+
role: "system" | "user" | "assistant" | "tool";
|
|
4
|
+
content: string;
|
|
5
|
+
name?: string;
|
|
6
|
+
tool_calls?: ToolCall[];
|
|
7
|
+
tool_call_id?: string;
|
|
8
|
+
}
|
|
9
|
+
export interface ToolCall {
|
|
10
|
+
id: string;
|
|
11
|
+
type: "function";
|
|
12
|
+
function: {
|
|
13
|
+
name: string;
|
|
14
|
+
arguments: string;
|
|
15
|
+
};
|
|
16
|
+
}
|
|
17
|
+
export interface OpenAITool {
|
|
18
|
+
type: "function";
|
|
19
|
+
function: {
|
|
20
|
+
name: string;
|
|
21
|
+
description: string;
|
|
22
|
+
parameters: IDataObject;
|
|
23
|
+
};
|
|
24
|
+
}
|
|
25
|
+
export interface OpenAIRequest {
|
|
26
|
+
model: string;
|
|
27
|
+
messages: OpenAIMessage[];
|
|
28
|
+
tools?: OpenAITool[];
|
|
29
|
+
tool_choice?: "auto" | "none" | "required" | {
|
|
30
|
+
type: "function";
|
|
31
|
+
function: {
|
|
32
|
+
name: string;
|
|
33
|
+
};
|
|
34
|
+
};
|
|
35
|
+
response_format?: {
|
|
36
|
+
type: "text" | "json_object";
|
|
37
|
+
};
|
|
38
|
+
temperature?: number;
|
|
39
|
+
max_tokens?: number;
|
|
40
|
+
top_p?: number;
|
|
41
|
+
frequency_penalty?: number;
|
|
42
|
+
presence_penalty?: number;
|
|
43
|
+
stop?: string | string[];
|
|
44
|
+
stream?: boolean;
|
|
45
|
+
seed?: number;
|
|
46
|
+
user?: string;
|
|
47
|
+
}
|
|
48
|
+
export interface OpenAIResponse {
|
|
49
|
+
id: string;
|
|
50
|
+
object: "chat.completion";
|
|
51
|
+
created: number;
|
|
52
|
+
model: string;
|
|
53
|
+
choices: Array<{
|
|
54
|
+
index: number;
|
|
55
|
+
message: {
|
|
56
|
+
role: "assistant";
|
|
57
|
+
content: string | null;
|
|
58
|
+
tool_calls?: ToolCall[];
|
|
59
|
+
};
|
|
60
|
+
finish_reason: "stop" | "length" | "tool_calls" | "content_filter";
|
|
61
|
+
}>;
|
|
62
|
+
usage: {
|
|
63
|
+
prompt_tokens: number;
|
|
64
|
+
completion_tokens: number;
|
|
65
|
+
total_tokens: number;
|
|
66
|
+
};
|
|
67
|
+
}
|
|
68
|
+
export interface CopilotRequest {
|
|
69
|
+
model: string;
|
|
70
|
+
message: string;
|
|
71
|
+
system_message?: string;
|
|
72
|
+
temperature?: number;
|
|
73
|
+
max_tokens?: number;
|
|
74
|
+
tools?: OpenAITool[];
|
|
75
|
+
tool_choice?: string;
|
|
76
|
+
}
|
|
77
|
+
export interface CopilotResponse {
|
|
78
|
+
message: string;
|
|
79
|
+
model: string;
|
|
80
|
+
usage: {
|
|
81
|
+
prompt_tokens: number;
|
|
82
|
+
completion_tokens: number;
|
|
83
|
+
total_tokens: number;
|
|
84
|
+
};
|
|
85
|
+
finish_reason: string;
|
|
86
|
+
tool_calls?: ToolCall[];
|
|
87
|
+
}
|
|
88
|
+
export declare function mapOpenAIModelToCopilot(openaiModel: string): string;
|
|
89
|
+
export declare function convertOpenAIMessagesToCopilot(messages: OpenAIMessage[]): {
|
|
90
|
+
message: string;
|
|
91
|
+
system_message?: string;
|
|
92
|
+
};
|
|
93
|
+
export declare function convertCopilotResponseToOpenAI(copilotResponse: CopilotResponse, model: string): OpenAIResponse;
|
|
94
|
+
export declare function parseOpenAIRequest(context: IExecuteFunctions, itemIndex: number): OpenAIRequest;
|
|
95
|
+
export declare function debugLog(context: IExecuteFunctions, itemIndex: number, message: string, data?: unknown): void;
|