@jskit-ai/assistant 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.descriptor.mjs +284 -0
- package/package.json +31 -0
- package/src/client/components/AssistantClientElement.vue +1316 -0
- package/src/client/components/AssistantConsoleSettingsClientElement.vue +71 -0
- package/src/client/components/AssistantSettingsFormCard.vue +76 -0
- package/src/client/components/AssistantWorkspaceClientElement.vue +15 -0
- package/src/client/components/AssistantWorkspaceSettingsClientElement.vue +73 -0
- package/src/client/composables/useAssistantWorkspaceRuntime.js +789 -0
- package/src/client/index.js +12 -0
- package/src/client/lib/assistantApi.js +137 -0
- package/src/client/lib/assistantHttpClient.js +10 -0
- package/src/client/lib/markdownRenderer.js +31 -0
- package/src/client/providers/AssistantWebClientProvider.js +25 -0
- package/src/server/AssistantServiceProvider.js +179 -0
- package/src/server/actionIds.js +11 -0
- package/src/server/actions.js +191 -0
- package/src/server/diTokens.js +19 -0
- package/src/server/lib/aiClient.js +43 -0
- package/src/server/lib/ndjson.js +47 -0
- package/src/server/lib/providers/anthropicClient.js +375 -0
- package/src/server/lib/providers/common.js +158 -0
- package/src/server/lib/providers/deepSeekClient.js +22 -0
- package/src/server/lib/providers/openAiClient.js +13 -0
- package/src/server/lib/providers/openAiCompatibleClient.js +69 -0
- package/src/server/lib/resolveWorkspaceSlug.js +24 -0
- package/src/server/lib/serviceToolCatalog.js +459 -0
- package/src/server/registerRoutes.js +384 -0
- package/src/server/repositories/assistantSettingsRepository.js +100 -0
- package/src/server/repositories/conversationsRepository.js +244 -0
- package/src/server/repositories/messagesRepository.js +154 -0
- package/src/server/repositories/repositoryPersistenceUtils.js +63 -0
- package/src/server/services/assistantSettingsService.js +153 -0
- package/src/server/services/chatService.js +987 -0
- package/src/server/services/transcriptService.js +334 -0
- package/src/shared/assistantPaths.js +50 -0
- package/src/shared/assistantResource.js +323 -0
- package/src/shared/assistantSettingsResource.js +214 -0
- package/src/shared/index.js +39 -0
- package/src/shared/queryKeys.js +69 -0
- package/src/shared/settingsEvents.js +7 -0
- package/src/shared/streamEvents.js +31 -0
- package/src/shared/support/positiveInteger.js +9 -0
- package/templates/migrations/assistant_settings_initial.cjs +39 -0
- package/templates/migrations/assistant_transcripts_initial.cjs +51 -0
- package/templates/src/pages/admin/workspace/assistant/index.vue +7 -0
- package/test/aiConfigValidation.test.js +15 -0
- package/test/assistantApiSurfaceHeader.test.js +64 -0
- package/test/assistantResource.test.js +53 -0
- package/test/assistantSettingsResource.test.js +48 -0
- package/test/assistantSettingsService.test.js +133 -0
- package/test/chatService.test.js +841 -0
- package/test/descriptorSurfaceOption.test.js +35 -0
- package/test/queryKeys.test.js +41 -0
- package/test/resolveWorkspaceSlug.test.js +83 -0
- package/test/routeInputContracts.test.js +287 -0
- package/test/serviceToolCatalog.test.js +1235 -0
- package/test/transcriptService.test.js +175 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
const NDJSON_CONTENT_TYPE = "application/x-ndjson; charset=utf-8";

/**
 * Apply NDJSON streaming headers to a Fastify reply.
 *
 * Headers are written to both the Fastify reply and the underlying raw
 * response: reply.hijack bypasses part of reply serialization, so
 * duplicating them keeps the streaming content-type intact.
 *
 * @param {object} reply - Fastify reply instance.
 */
function setNdjsonHeaders(reply) {
  const streamingHeaders = [
    ["Content-Type", NDJSON_CONTENT_TYPE],
    ["Cache-Control", "no-cache"],
    ["X-Accel-Buffering", "no"]
  ];

  for (const [name, value] of streamingHeaders) {
    reply.header(name, value);
  }

  const raw = reply?.raw;
  if (raw && typeof raw.setHeader === "function") {
    for (const [name, value] of streamingHeaders) {
      raw.setHeader(name, value);
    }
  }
}
|
|
16
|
+
|
|
17
|
+
/**
 * Serialize a payload as one NDJSON line and write it to the raw response.
 *
 * @param {object} reply - Fastify reply whose raw stream receives the line.
 * @param {object} [payload] - JSON-serializable event payload.
 */
function writeNdjson(reply, payload = {}) {
  const line = JSON.stringify(payload);
  reply.raw.write(`${line}\n`);
}
|
|
21
|
+
|
|
22
|
+
/**
 * Close the NDJSON stream if it is still open.
 *
 * No-op when the reply or its raw stream is missing, or when the stream
 * has already ended — safe to call from both success and error paths.
 *
 * @param {object} reply - Fastify reply instance.
 */
function endNdjson(reply) {
  const raw = reply?.raw;
  if (!raw || raw.writableEnded) {
    return;
  }

  raw.end();
}
|
|
29
|
+
|
|
30
|
+
/**
 * Map an arbitrary error to a frozen, client-safe stream error descriptor.
 *
 * Non-integer statuses or statuses outside 400-599 collapse to 500, and
 * 5xx messages are replaced with a generic string so internals never leak
 * to the client.
 *
 * @param {Error|object} error - Error raised while streaming.
 * @returns {{code: string, message: string, status: number}} frozen descriptor.
 */
function mapStreamError(error) {
  const rawStatus = Number(error?.status || error?.statusCode || 500);
  const inRange = Number.isInteger(rawStatus) && rawStatus >= 400 && rawStatus <= 599;
  const status = inRange ? rawStatus : 500;

  const code = String(error?.code || "assistant_stream_failed").trim() || "assistant_stream_failed";
  const message = status >= 500 ? "Assistant stream failed." : String(error?.message || "Request failed.");

  return Object.freeze({ code, message, status });
}
|
|
40
|
+
|
|
41
|
+
export {
|
|
42
|
+
NDJSON_CONTENT_TYPE,
|
|
43
|
+
setNdjsonHeaders,
|
|
44
|
+
writeNdjson,
|
|
45
|
+
endNdjson,
|
|
46
|
+
mapStreamError
|
|
47
|
+
};
|
|
@@ -0,0 +1,375 @@
|
|
|
1
|
+
import { normalizeText } from "@jskit-ai/kernel/shared/support/normalize";
|
|
2
|
+
import {
|
|
3
|
+
createDisabledClient,
|
|
4
|
+
createProviderRequestError,
|
|
5
|
+
normalizeArray,
|
|
6
|
+
normalizeContentText,
|
|
7
|
+
normalizeModel,
|
|
8
|
+
normalizeObject,
|
|
9
|
+
normalizeOptionalHttpUrl,
|
|
10
|
+
normalizeTimeoutMs,
|
|
11
|
+
parseJsonObjectOrDefault
|
|
12
|
+
} from "./common.js";
|
|
13
|
+
|
|
14
|
+
// Defaults applied when assistant settings omit Anthropic configuration.
const DEFAULT_ANTHROPIC_MODEL = "claude-3-5-sonnet-latest";
const DEFAULT_ANTHROPIC_BASE_URL = "https://api.anthropic.com";
// Sent as max_tokens on every /v1/messages request.
const DEFAULT_ANTHROPIC_MAX_TOKENS = 4096;
// Pinned value for the anthropic-version request header.
const DEFAULT_ANTHROPIC_VERSION = "2023-06-01";
|
|
18
|
+
|
|
19
|
+
/**
 * Clamp a temperature value into the [0, 1] range.
 *
 * @param {*} value - Candidate temperature.
 * @param {number} [fallback=0.2] - Used when value is not a finite number.
 * @returns {number} temperature within [0, 1], or the fallback.
 */
function normalizeTemperature(value, fallback = 0.2) {
  const candidate = Number(value);
  if (!Number.isFinite(candidate)) {
    return fallback;
  }

  return Math.min(1, Math.max(0, candidate));
}
|
|
35
|
+
|
|
36
|
+
/**
 * Convert one OpenAI-style tool descriptor into Anthropic's tool shape.
 *
 * @param {object} tool - OpenAI-style descriptor
 *   ({ function: { name, description, parameters } }).
 * @returns {object|null} Anthropic tool ({ name, description, input_schema }),
 *   or null when the function name is missing.
 */
function normalizeToolDescriptor(tool) {
  const functionSpec = normalizeObject(normalizeObject(tool).function);
  const name = normalizeText(functionSpec.name);
  if (!name) {
    return null;
  }

  const parameters = normalizeObject(functionSpec.parameters);
  const hasSchema = Object.keys(parameters).length > 0;

  return {
    name,
    description: normalizeText(functionSpec.description),
    // Default to a permissive object schema when no parameters are declared.
    input_schema: hasSchema
      ? parameters
      : { type: "object", properties: {}, additionalProperties: true }
  };
}
|
|
58
|
+
|
|
59
|
+
/**
 * Map a list of OpenAI-style tools to Anthropic tool descriptors,
 * dropping entries without a usable function name.
 *
 * @param {Array} [tools] - OpenAI-style tool descriptors.
 * @returns {Array} Anthropic tool descriptors.
 */
function toAnthropicTools(tools = []) {
  const descriptors = [];
  for (const tool of normalizeArray(tools)) {
    const descriptor = normalizeToolDescriptor(tool);
    if (descriptor) {
      descriptors.push(descriptor);
    }
  }

  return descriptors;
}
|
|
64
|
+
|
|
65
|
+
/**
 * Split an OpenAI-style message list into Anthropic's system prompt plus
 * message array.
 *
 * - "system" entries are concatenated (blank ones dropped) into one string.
 * - "user" entries become plain-text user messages; empty text is dropped.
 * - "assistant" entries become text and/or tool_use blocks; a lone text
 *   block collapses to Anthropic's string shorthand, and entries with no
 *   usable blocks are dropped.
 * - "tool" entries become user messages carrying a tool_result block and
 *   are dropped when tool_call_id is missing.
 * - Any other role is ignored.
 *
 * @param {Array} [messages] - OpenAI-style chat messages.
 * @returns {{system: string, messages: Array}} Anthropic request pieces.
 */
function toAnthropicSystemAndMessages(messages = []) {
  const systemParts = [];
  const converted = [];

  // Build Anthropic content blocks for one assistant message.
  const assistantBlocks = (message) => {
    const blocks = [];
    const text = normalizeContentText(message.content);
    if (text) {
      blocks.push({ type: "text", text });
    }

    normalizeArray(message.tool_calls).forEach((toolCall, index) => {
      const callObject = normalizeObject(toolCall);
      const functionSpec = normalizeObject(callObject.function);
      const name = normalizeText(functionSpec.name);
      if (!name) {
        return;
      }

      blocks.push({
        type: "tool_use",
        id: normalizeText(callObject.id) || `tool_call_${index + 1}`,
        name,
        input: parseJsonObjectOrDefault(functionSpec.arguments, {})
      });
    });

    return blocks;
  };

  for (const entry of normalizeArray(messages)) {
    const message = normalizeObject(entry);
    const role = normalizeText(message.role).toLowerCase();

    switch (role) {
      case "system": {
        const systemText = normalizeContentText(message.content);
        if (systemText) {
          systemParts.push(systemText);
        }
        break;
      }
      case "user": {
        const text = normalizeContentText(message.content);
        if (text) {
          converted.push({ role: "user", content: text });
        }
        break;
      }
      case "assistant": {
        const blocks = assistantBlocks(message);
        if (blocks.length > 0) {
          converted.push({
            role: "assistant",
            // A lone text block collapses to the plain-string shorthand.
            content: blocks.length === 1 && blocks[0].type === "text" ? blocks[0].text : blocks
          });
        }
        break;
      }
      case "tool": {
        const toolUseId = normalizeText(message.tool_call_id);
        if (toolUseId) {
          converted.push({
            role: "user",
            content: [
              {
                type: "tool_result",
                tool_use_id: toolUseId,
                // Empty tool output is represented as an empty JSON object.
                content: normalizeContentText(message.content) || "{}"
              }
            ]
          });
        }
        break;
      }
      default:
        break;
    }
  }

  return {
    system: systemParts.join("\n\n"),
    messages: converted
  };
}
|
|
157
|
+
|
|
158
|
+
/**
 * Translate Anthropic response content blocks into an OpenAI-style delta.
 *
 * Text blocks are concatenated into delta.content; tool_use blocks become
 * delta.tool_calls entries in OpenAI function-call shape (arguments are
 * JSON-stringified). A key is only present when matching blocks exist.
 *
 * @param {Array} [content] - Anthropic content blocks.
 * @returns {{content?: string, tool_calls?: Array}} OpenAI-like delta.
 */
function mapAnthropicContentToOpenAiDelta(content = []) {
  const textParts = [];
  const toolCalls = [];

  for (const entry of normalizeArray(content)) {
    const block = normalizeObject(entry);
    const type = normalizeText(block.type).toLowerCase();

    if (type === "text") {
      const text = String(block.text || "");
      if (text) {
        textParts.push(text);
      }
    } else if (type === "tool_use") {
      const name = normalizeText(block.name);
      if (!name) {
        continue;
      }

      toolCalls.push({
        id: normalizeText(block.id) || `tool_call_${toolCalls.length + 1}`,
        index: toolCalls.length,
        type: "function",
        function: {
          name,
          arguments: JSON.stringify(normalizeObject(block.input))
        }
      });
    }
  }

  const delta = {};
  if (textParts.length > 0) {
    delta.content = textParts.join("");
  }
  if (toolCalls.length > 0) {
    delta.tool_calls = toolCalls;
  }

  return delta;
}
|
|
203
|
+
|
|
204
|
+
/**
 * Wrap a single value in an async iterator, mimicking a one-chunk stream.
 *
 * @param {*} chunk - The only value the stream yields.
 * @returns {AsyncGenerator} async iterator that yields chunk once, then ends.
 */
function createSingleChunkStream(chunk) {
  async function* once() {
    yield chunk;
  }

  return once();
}
|
|
209
|
+
|
|
210
|
+
/**
 * POST a single request to Anthropic's /v1/messages endpoint.
 *
 * Combines the caller's abort signal with an internal timeout: either one
 * aborting cancels the fetch. The response body is parsed as JSON; non-OK
 * responses and unparsable payloads are rethrown as provider request errors.
 *
 * Fix: `messages` and `tools` now default to empty arrays — previously,
 * omitting `tools` made `tools.length` throw a TypeError before the
 * request was even sent.
 *
 * @param {object} options
 * @param {string} options.apiKey - Anthropic API key (sent as x-api-key).
 * @param {string} options.baseUrl - Absolute base URL (no trailing slash).
 * @param {string} options.model - Model identifier.
 * @param {number} options.timeoutMs - Request timeout in milliseconds.
 * @param {string} [options.system] - Optional system prompt.
 * @param {Array} [options.messages] - Anthropic-shaped messages.
 * @param {Array} [options.tools] - Anthropic tool descriptors.
 * @param {number} [options.temperature=0.2] - Sampling temperature.
 * @param {AbortSignal} [options.signal] - Upstream cancellation signal.
 * @returns {Promise<object>} Parsed Anthropic response payload.
 * @throws When global fetch is unavailable, the payload is not JSON, or
 *   the provider responds with a non-2xx status.
 */
async function fetchAnthropicMessage({
  apiKey,
  baseUrl,
  model,
  timeoutMs,
  system = "",
  messages = [],
  tools = [],
  temperature = 0.2,
  signal
} = {}) {
  if (typeof fetch !== "function") {
    throw createProviderRequestError({
      status: 500,
      code: "assistant_provider_fetch_missing",
      message: "Global fetch is not available for anthropic provider."
    });
  }

  const timeout = normalizeTimeoutMs(timeoutMs);
  const upstreamSignal = signal;
  const controller = new AbortController();
  // Abort the request when the timeout fires...
  const timeoutHandle = setTimeout(() => {
    controller.abort();
  }, timeout);

  // ...or when the upstream signal aborts first.
  const handleAbort = () => {
    controller.abort();
  };

  if (upstreamSignal) {
    if (upstreamSignal.aborted) {
      controller.abort();
    } else {
      upstreamSignal.addEventListener("abort", handleAbort, {
        once: true
      });
    }
  }

  const requestPayload = {
    model,
    max_tokens: DEFAULT_ANTHROPIC_MAX_TOKENS,
    messages,
    temperature: normalizeTemperature(temperature)
  };
  if (normalizeText(system)) {
    requestPayload.system = system;
  }
  if (tools.length > 0) {
    requestPayload.tools = tools;
  }

  try {
    const response = await fetch(`${baseUrl}/v1/messages`, {
      method: "POST",
      headers: {
        "content-type": "application/json",
        "x-api-key": apiKey,
        "anthropic-version": DEFAULT_ANTHROPIC_VERSION
      },
      body: JSON.stringify(requestPayload),
      signal: controller.signal
    });

    let payload = {};
    try {
      payload = await response.json();
    } catch {
      throw createProviderRequestError({
        status: response.status,
        code: "assistant_provider_invalid_response",
        message: "Assistant provider returned an invalid response payload."
      });
    }

    if (response.ok !== true) {
      const providerError = normalizeObject(payload.error);
      throw createProviderRequestError({
        status: response.status,
        code: normalizeText(providerError.type) || "assistant_provider_failed",
        message: normalizeText(providerError.message) || normalizeText(payload.message)
      });
    }

    return payload;
  } finally {
    // Always release the timer and detach the upstream listener.
    clearTimeout(timeoutHandle);
    if (upstreamSignal) {
      upstreamSignal.removeEventListener("abort", handleAbort);
    }
  }
}
|
|
304
|
+
|
|
305
|
+
/**
 * Build an Anthropic chat client exposing the same OpenAI-compatible
 * surface (createChatCompletion / createChatCompletionStream) as the
 * other providers.
 *
 * Returns a disabled client (all calls throw 503) when the provider is
 * disabled or the API key is missing.
 *
 * @param {object} [options]
 * @param {boolean} [options.enabled=true] - Provider toggle.
 * @param {string} [options.apiKey] - Anthropic API key.
 * @param {string} [options.baseUrl] - Override for the API base URL.
 * @param {string} [options.model] - Override for the default model.
 * @param {number} [options.timeoutMs=120000] - Request timeout in ms.
 * @returns {object} frozen client object.
 */
function createAnthropicClient({
  enabled = true,
  apiKey = "",
  baseUrl = "",
  model = "",
  timeoutMs = 120_000
} = {}) {
  const resolvedApiKey = normalizeText(apiKey);
  const resolvedBaseUrl = normalizeOptionalHttpUrl(normalizeText(baseUrl) || DEFAULT_ANTHROPIC_BASE_URL, {
    context: "assistant anthropic baseUrl"
  });
  const resolvedModel = normalizeModel(model, DEFAULT_ANTHROPIC_MODEL);

  const isConfigured = enabled === true && Boolean(resolvedApiKey);
  if (!isConfigured) {
    return createDisabledClient({
      provider: "anthropic",
      model: resolvedModel
    });
  }

  // Run one non-streaming completion and attach an OpenAI-like delta view
  // of the Anthropic content under __openAiLikeDelta.
  async function createCompletion({ messages = [], tools = [], temperature = 0, signal } = {}) {
    const { system, messages: anthropicMessages } = toAnthropicSystemAndMessages(messages);

    const payload = await fetchAnthropicMessage({
      apiKey: resolvedApiKey,
      baseUrl: resolvedBaseUrl,
      model: resolvedModel,
      timeoutMs,
      system,
      messages: anthropicMessages,
      tools: toAnthropicTools(tools),
      temperature,
      signal
    });

    return {
      ...payload,
      __openAiLikeDelta: mapAnthropicContentToOpenAiDelta(payload.content)
    };
  }

  return Object.freeze({
    enabled: true,
    provider: "anthropic",
    defaultModel: resolvedModel,
    createChatCompletion: createCompletion,
    // The response is fetched whole and exposed as a one-chunk stream so
    // callers can reuse the same streaming code path for every provider.
    async createChatCompletionStream({ messages = [], tools = [], temperature = 0.2, signal } = {}) {
      const completion = await createCompletion({
        messages,
        tools,
        temperature,
        signal
      });

      return createSingleChunkStream({
        choices: [{ delta: completion.__openAiLikeDelta || {} }]
      });
    }
  });
}
|
|
370
|
+
|
|
371
|
+
export {
|
|
372
|
+
createAnthropicClient,
|
|
373
|
+
DEFAULT_ANTHROPIC_MODEL,
|
|
374
|
+
DEFAULT_ANTHROPIC_BASE_URL
|
|
375
|
+
};
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
import { AppError } from "@jskit-ai/kernel/server/runtime/errors";
|
|
2
|
+
import { normalizeText } from "@jskit-ai/kernel/shared/support/normalize";
|
|
3
|
+
|
|
4
|
+
// Providers the assistant can talk to; membership is checked via the Set.
const SUPPORTED_AI_PROVIDERS = Object.freeze(["openai", "deepseek", "anthropic"]);
const SUPPORTED_AI_PROVIDER_SET = new Set(SUPPORTED_AI_PROVIDERS);
// Fallbacks applied when settings omit a provider or timeout.
const DEFAULT_AI_PROVIDER = "openai";
const DEFAULT_AI_TIMEOUT_MS = 120_000;
|
|
8
|
+
|
|
9
|
+
/**
 * Resolve a provider identifier to one of the supported providers.
 *
 * Blank input falls back to DEFAULT_AI_PROVIDER; anything else must match
 * a supported provider (case-insensitive).
 *
 * @param {*} provider - Candidate provider name.
 * @returns {string} normalized provider identifier.
 * @throws {TypeError} when the provider is not supported.
 */
function normalizeProvider(provider) {
  const candidate = normalizeText(provider).toLowerCase() || DEFAULT_AI_PROVIDER;
  if (!SUPPORTED_AI_PROVIDER_SET.has(candidate)) {
    throw new TypeError(
      `Unsupported assistant provider: ${candidate}. Supported providers: ${SUPPORTED_AI_PROVIDERS.join(", ")}.`
    );
  }

  return candidate;
}
|
|
19
|
+
|
|
20
|
+
/**
 * Coerce a timeout to a positive integer number of milliseconds.
 *
 * @param {*} value - Candidate timeout.
 * @param {number} [fallback=DEFAULT_AI_TIMEOUT_MS] - Used for non-integer
 *   or non-positive input.
 * @returns {number} positive integer timeout in milliseconds.
 */
function normalizeTimeoutMs(value, fallback = DEFAULT_AI_TIMEOUT_MS) {
  const candidate = Number(value);
  const isPositiveInteger = Number.isInteger(candidate) && candidate >= 1;
  return isPositiveInteger ? candidate : fallback;
}
|
|
28
|
+
|
|
29
|
+
/**
 * Normalize a model identifier, substituting a fallback when blank.
 *
 * @param {*} value - Candidate model name.
 * @param {string} [fallback=""] - Used when the value normalizes to blank.
 * @returns {string} normalized model name or the fallback.
 */
function normalizeModel(value, fallback = "") {
  const normalized = normalizeText(value);
  return normalized ? normalized : fallback;
}
|
|
32
|
+
|
|
33
|
+
/**
 * Validate an optional base URL, returning "" when absent.
 *
 * Accepts only absolute http(s) URLs; trailing slashes are stripped from
 * the serialized form.
 *
 * @param {*} value - Candidate URL.
 * @param {object} [options]
 * @param {string} [options.context="assistant baseUrl"] - Label for error messages.
 * @returns {string} normalized URL, or "" when the input is blank.
 * @throws {TypeError} when the value is not an absolute http(s) URL.
 */
function normalizeOptionalHttpUrl(value, { context = "assistant baseUrl" } = {}) {
  const candidate = normalizeText(value);
  if (!candidate) {
    return "";
  }

  const invalidUrl = () => new TypeError(`${context} must be an absolute http(s) URL.`);

  let parsed;
  try {
    parsed = new URL(candidate);
  } catch {
    throw invalidUrl();
  }

  const protocol = String(parsed.protocol || "").toLowerCase();
  if (protocol !== "http:" && protocol !== "https:") {
    throw invalidUrl();
  }

  const serialized = parsed.toString();
  const withoutTrailingSlashes = serialized.replace(/\/+$/g, "");
  return withoutTrailingSlashes || serialized;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Build a no-op client for an unconfigured provider.
 *
 * Both completion methods throw a 503 AppError, giving callers a uniform
 * "not configured" failure regardless of entry point.
 *
 * @param {object} [options]
 * @param {string} [options.provider=DEFAULT_AI_PROVIDER] - Provider label.
 * @param {string} [options.model=""] - Model label to report.
 * @returns {object} frozen disabled client.
 */
function createDisabledClient({ provider = DEFAULT_AI_PROVIDER, model = "" } = {}) {
  const fail = () => {
    throw new AppError(503, "Assistant provider is not configured.");
  };

  return Object.freeze({
    enabled: false,
    provider,
    defaultModel: model,
    async createChatCompletion() {
      fail();
    },
    async createChatCompletionStream() {
      fail();
    }
  });
}
|
|
72
|
+
|
|
73
|
+
/**
 * Build an AppError describing a failed provider request.
 *
 * Non-integer statuses or statuses outside 400-599 collapse to 500; blank
 * messages get a generic text that hides internals for 5xx failures.
 *
 * @param {object} [options]
 * @param {number} [options.status=500] - HTTP-like status code.
 * @param {string} [options.code="assistant_provider_failed"] - Machine-readable code.
 * @param {string} [options.message=""] - Human-readable message.
 * @returns {AppError} error carrying { code } metadata.
 */
function createProviderRequestError({ status = 500, code = "assistant_provider_failed", message = "" } = {}) {
  const numericStatus = Number(status);
  let safeStatus = Number.isInteger(numericStatus) ? numericStatus : 500;
  if (safeStatus < 400 || safeStatus > 599) {
    safeStatus = 500;
  }

  const safeCode = normalizeText(code) || "assistant_provider_failed";
  const genericMessage = safeStatus >= 500 ? "Assistant provider request failed." : "Request failed.";
  const safeMessage = normalizeText(message) || genericMessage;

  return new AppError(safeStatus, safeMessage, {
    code: safeCode
  });
}
|
|
84
|
+
|
|
85
|
+
/**
 * Return the value when it is a plain (non-array) object, else {}.
 *
 * @param {*} value - Candidate object.
 * @returns {object} the value itself, or a fresh empty object.
 */
function normalizeObject(value) {
  const isPlainObject = Boolean(value) && typeof value === "object" && !Array.isArray(value);
  return isPlainObject ? value : {};
}
|
|
92
|
+
|
|
93
|
+
/**
 * Return the value when it is an array, else an empty array.
 *
 * @param {*} value - Candidate array.
 * @returns {Array} the value itself, or [].
 */
function normalizeArray(value) {
  if (Array.isArray(value)) {
    return value;
  }

  return [];
}
|
|
96
|
+
|
|
97
|
+
/**
 * Flatten message content into a single string.
 *
 * Strings pass through unchanged. Arrays are flattened by concatenating
 * each entry's text: string entries verbatim, object entries via their
 * `text` property (regardless of block type). Any other shape yields "".
 *
 * @param {*} content - Message content (string, block array, or other).
 * @returns {string} flattened text.
 */
function normalizeContentText(content) {
  if (typeof content === "string") {
    return content;
  }

  if (!Array.isArray(content)) {
    return "";
  }

  let flattened = "";
  for (const entry of content) {
    if (typeof entry === "string") {
      flattened += entry;
      continue;
    }

    // Text and non-text blocks alike contribute their `text` property.
    flattened += String(normalizeObject(entry).text || "");
  }

  return flattened;
}
|
|
121
|
+
|
|
122
|
+
/**
 * Coerce a value to a plain object, parsing JSON strings when needed.
 *
 * Plain (non-array) objects pass through unchanged. Strings are
 * normalized and JSON-parsed; only a JSON object (not array/primitive) is
 * accepted. Everything else — including malformed JSON — yields the
 * fallback.
 *
 * @param {*} value - Object, JSON string, or other.
 * @param {object} [fallback={}] - Returned when no object can be produced.
 * @returns {object} plain object or the fallback.
 */
function parseJsonObjectOrDefault(value, fallback = {}) {
  const isPlainObject = (candidate) =>
    Boolean(candidate) && typeof candidate === "object" && !Array.isArray(candidate);

  if (isPlainObject(value)) {
    return value;
  }

  const source = normalizeText(value);
  if (!source) {
    return fallback;
  }

  try {
    const parsed = JSON.parse(source);
    if (isPlainObject(parsed)) {
      return parsed;
    }
  } catch {
    // Malformed JSON falls through to the fallback.
  }

  return fallback;
}
|
|
143
|
+
|
|
144
|
+
export {
|
|
145
|
+
SUPPORTED_AI_PROVIDERS,
|
|
146
|
+
DEFAULT_AI_PROVIDER,
|
|
147
|
+
DEFAULT_AI_TIMEOUT_MS,
|
|
148
|
+
normalizeProvider,
|
|
149
|
+
normalizeTimeoutMs,
|
|
150
|
+
normalizeModel,
|
|
151
|
+
normalizeOptionalHttpUrl,
|
|
152
|
+
createDisabledClient,
|
|
153
|
+
createProviderRequestError,
|
|
154
|
+
normalizeObject,
|
|
155
|
+
normalizeArray,
|
|
156
|
+
normalizeContentText,
|
|
157
|
+
parseJsonObjectOrDefault
|
|
158
|
+
};
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { normalizeText } from "@jskit-ai/kernel/shared/support/normalize";
|
|
2
|
+
import { createOpenAiCompatibleClient } from "./openAiCompatibleClient.js";
|
|
3
|
+
|
|
4
|
+
// DeepSeek defaults; its API is OpenAI-compatible, so the generic client is reused.
const DEFAULT_DEEPSEEK_MODEL = "deepseek-chat";
const DEFAULT_DEEPSEEK_BASE_URL = "https://api.deepseek.com";

/**
 * Build a DeepSeek chat client on top of the OpenAI-compatible client.
 *
 * @param {object} [options] - Same options as createOpenAiCompatibleClient;
 *   baseUrl defaults to DeepSeek's public endpoint when blank.
 * @returns {object} frozen client object.
 */
function createDeepSeekClient(options = {}) {
  const resolvedBaseUrl = normalizeText(options.baseUrl) || DEFAULT_DEEPSEEK_BASE_URL;

  return createOpenAiCompatibleClient({
    ...options,
    provider: "deepseek",
    baseUrl: resolvedBaseUrl,
    defaultModel: DEFAULT_DEEPSEEK_MODEL
  });
}
|
|
17
|
+
|
|
18
|
+
export {
|
|
19
|
+
createDeepSeekClient,
|
|
20
|
+
DEFAULT_DEEPSEEK_MODEL,
|
|
21
|
+
DEFAULT_DEEPSEEK_BASE_URL
|
|
22
|
+
};
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { createOpenAiCompatibleClient } from "./openAiCompatibleClient.js";
|
|
2
|
+
|
|
3
|
+
// Default model when assistant settings do not pin one.
const DEFAULT_OPENAI_MODEL = "gpt-4.1-mini";

/**
 * Build an OpenAI chat client on top of the OpenAI-compatible client.
 *
 * @param {object} [options] - Same options as createOpenAiCompatibleClient.
 * @returns {object} frozen client object.
 */
function createOpenAiClient(options = {}) {
  return createOpenAiCompatibleClient({
    ...options,
    provider: "openai",
    defaultModel: DEFAULT_OPENAI_MODEL
  });
}
|
|
12
|
+
|
|
13
|
+
export { createOpenAiClient, DEFAULT_OPENAI_MODEL };
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import OpenAI from "openai";
|
|
2
|
+
import { normalizeText } from "@jskit-ai/kernel/shared/support/normalize";
|
|
3
|
+
import {
|
|
4
|
+
createDisabledClient,
|
|
5
|
+
normalizeModel,
|
|
6
|
+
normalizeOptionalHttpUrl,
|
|
7
|
+
normalizeTimeoutMs
|
|
8
|
+
} from "./common.js";
|
|
9
|
+
|
|
10
|
+
/**
 * Build a chat client for any OpenAI-compatible API (OpenAI, DeepSeek, ...).
 *
 * Returns a disabled client (all calls throw 503) when the provider is
 * disabled or the API key is missing.
 *
 * Fix: the streaming path previously sent `tools` unconditionally — even
 * an empty array — while the non-streaming path omitted it when empty.
 * Both paths now omit `tools` when no tools are supplied, which keeps the
 * two requests consistent and avoids backends that reject `tools: []`.
 *
 * @param {object} [options]
 * @param {boolean} [options.enabled=true] - Provider toggle.
 * @param {string} [options.apiKey] - API key.
 * @param {string} [options.baseUrl] - Optional absolute http(s) base URL.
 * @param {string} [options.model] - Model override.
 * @param {string} [options.defaultModel] - Fallback model when model is blank.
 * @param {string} [options.provider="openai"] - Provider label.
 * @param {number} [options.timeoutMs=120000] - Request timeout in ms.
 * @returns {object} frozen client object.
 */
function createOpenAiCompatibleClient({
  enabled = true,
  apiKey = "",
  baseUrl = "",
  model = "",
  defaultModel = "",
  provider = "openai",
  timeoutMs = 120_000
} = {}) {
  const normalizedApiKey = normalizeText(apiKey);
  const normalizedProvider = normalizeText(provider).toLowerCase() || "openai";
  const normalizedModel = normalizeModel(model, defaultModel);

  if (enabled !== true || !normalizedApiKey) {
    return createDisabledClient({
      provider: normalizedProvider,
      model: normalizedModel
    });
  }

  const normalizedBaseUrl = normalizeOptionalHttpUrl(baseUrl, {
    context: `assistant ${normalizedProvider} baseUrl`
  });
  const client = new OpenAI({
    apiKey: normalizedApiKey,
    ...(normalizedBaseUrl ? { baseURL: normalizedBaseUrl } : {}),
    timeout: normalizeTimeoutMs(timeoutMs)
  });

  // Only attach `tools` when at least one is present; some compatible
  // backends reject an empty tools array.
  const toolsFragment = (tools) => (Array.isArray(tools) && tools.length > 0 ? { tools } : {});

  return Object.freeze({
    enabled: true,
    provider: normalizedProvider,
    defaultModel: normalizedModel,
    async createChatCompletion({ messages = [], tools = [], temperature = 0, signal } = {}) {
      const requestPayload = {
        model: normalizedModel,
        messages,
        ...toolsFragment(tools),
        temperature,
        stream: false
      };

      return client.chat.completions.create(requestPayload, signal ? { signal } : undefined);
    },
    async createChatCompletionStream({ messages = [], tools = [], signal, temperature = 0.2 } = {}) {
      return client.chat.completions.create(
        {
          model: normalizedModel,
          messages,
          ...toolsFragment(tools),
          temperature,
          stream: true
        },
        signal ? { signal } : undefined
      );
    }
  });
}
|
|
68
|
+
|
|
69
|
+
export { createOpenAiCompatibleClient };
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { normalizeText } from "@jskit-ai/kernel/shared/support/normalize";
|
|
2
|
+
|
|
3
|
+
/**
 * Determine the workspace slug for an action, checking candidate
 * locations in priority order:
 *   1. context.workspace.slug
 *   2. context.requestMeta.resolvedWorkspaceContext.workspace.slug
 *   3. actionInput.workspaceSlug
 *   4. context.requestMeta.request.input.params.workspaceSlug
 *
 * @param {object} [context] - Action execution context.
 * @param {object|null} [actionInput] - Raw action input payload.
 * @returns {string} lowercase slug, or "" when none is found.
 */
function resolveWorkspaceSlug(context = {}, actionInput = null) {
  const ctx = context && typeof context === "object" ? context : {};
  const input =
    actionInput && typeof actionInput === "object" && !Array.isArray(actionInput) ? actionInput : {};

  const candidates = [
    ctx?.workspace?.slug,
    ctx?.requestMeta?.resolvedWorkspaceContext?.workspace?.slug,
    input.workspaceSlug,
    ctx?.requestMeta?.request?.input?.params?.workspaceSlug
  ];

  for (const candidate of candidates) {
    const slug = normalizeText(candidate).toLowerCase();
    if (slug) {
      return slug;
    }
  }

  return "";
}
|
|
23
|
+
|
|
24
|
+
export { resolveWorkspaceSlug };
|