@easynet/agent-common 1.0.4 → 1.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/connectivity/check.d.ts +17 -0
- package/dist/connectivity/index.d.ts +3 -0
- package/dist/connectivity/types.d.ts +12 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +406 -0
- package/dist/index.js.map +1 -1
- package/dist/model/chat/index.d.ts +29 -0
- package/dist/model/embedding/index.d.ts +24 -0
- package/dist/model/hub/index.d.ts +28 -0
- package/dist/model/index.d.ts +8 -0
- package/dist/model/llm/index.d.ts +2 -0
- package/dist/model/llm/parser.d.ts +9 -0
- package/dist/model/llm/types.d.ts +30 -0
- package/dist/utils/deep-merge.d.ts +4 -0
- package/dist/utils/index.d.ts +1 -0
- package/package.json +1 -1
package/dist/connectivity/check.d.ts ADDED
@@ -0,0 +1,17 @@
+import type { CheckConnectivityResult } from "./types.js";
+export interface EndpointConnectivityOptions {
+    timeoutMs?: number;
+    resolveHost?: {
+        from: string;
+        to: string;
+    };
+    host?: string;
+    verifySSL?: boolean;
+    bypassAuth?: boolean;
+    featureKey?: string;
+}
+export declare function checkEndpointConnectivity(baseURL: string, options?: {
+    timeoutMs?: number;
+} & EndpointConnectivityOptions): Promise<CheckConnectivityResult>;
+export declare const CIS_UNREACHABLE_REMINDER = "Please ensure you are connected to Secure VPN and try again.";
+export declare function buildUnreachableError(endpointId: string, baseURL: string, detail?: string): string;
package/dist/connectivity/types.d.ts ADDED
@@ -0,0 +1,12 @@
+export type ConnectionPhase = "checking" | "reachable" | "unreachable";
+export interface ConnectionStatus {
+    phase: ConnectionPhase;
+    endpointId?: string;
+    baseURL?: string;
+    message?: string;
+}
+export interface CheckConnectivityResult {
+    reachable: boolean;
+    message?: string;
+    statusCode?: number;
+}
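As a quick orientation (not part of the published diff), here is a minimal sketch of how the new connectivity API might be used, assuming the package root re-exports the connectivity module as dist/index.js does; the endpoint id and base URL are placeholders.

```ts
import { checkEndpointConnectivity, buildUnreachableError } from "@easynet/agent-common";

// Hypothetical endpoint values for illustration only.
const endpointId = "cis-default";
const baseURL = "https://cis.example.internal/v1";

const result = await checkEndpointConnectivity(baseURL, {
  timeoutMs: 5000,
  verifySSL: false, // selects the https-module code path added in this release
});

if (!result.reachable) {
  // Compose the standard "unreachable" message, including the VPN reminder.
  throw new Error(buildUnreachableError(endpointId, baseURL, result.message));
}
console.log(`Reachable: ${result.message} (HTTP ${result.statusCode ?? "n/a"})`);
```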
package/dist/index.d.ts CHANGED
package/dist/index.js CHANGED
@@ -120,11 +120,416 @@ function resolveConfigPath(pathRef, configDir, options = {}) {
   const expanded = options.expandHome === false ? pathRef : expandHomePath(pathRef, { homeDir: options.homeDir });
   return path2.resolve(configDir, expanded);
 }
+
+// src/connectivity/check.ts
+import https from "https";
+var DEFAULT_TIMEOUT_MS = 8e3;
+function probePath(baseURL) {
+  const base = baseURL.replace(/\/+$/, "");
+  if (base.endsWith("/v1")) return `${base}/models`;
+  return base.includes("/v1") ? `${base}/models` : `${base}/v1/models`;
+}
+function resolveProbeRequest(baseURL, endpointOptions) {
+  let path3 = probePath(baseURL);
+  const urlObj = new URL(path3);
+  const resolveFrom = endpointOptions?.resolveHost?.from;
+  const resolveTo = endpointOptions?.resolveHost?.to;
+  let hostHeader = endpointOptions?.host;
+  if (resolveFrom && resolveTo && urlObj.hostname) {
+    urlObj.hostname = urlObj.hostname.replace(resolveFrom, resolveTo);
+    hostHeader = hostHeader ?? resolveFrom;
+  }
+  const searchParams = new URLSearchParams(urlObj.search);
+  if (endpointOptions?.bypassAuth === true) searchParams.set("bypass_auth", "true");
+  urlObj.search = searchParams.toString();
+  return { url: urlObj.toString(), hostHeader };
+}
+function checkWithHttps(url, hostHeader, options) {
+  return new Promise((resolve) => {
+    const u = new URL(url);
+    const reqOpts = {
+      hostname: u.hostname,
+      port: u.port || (u.protocol === "https:" ? 443 : 80),
+      path: u.pathname + u.search,
+      method: "GET",
+      headers: { Accept: "application/json" },
+      rejectUnauthorized: options.verifySSL
+    };
+    if (hostHeader) reqOpts.headers = { ...reqOpts.headers, Host: hostHeader };
+    const timeoutId = setTimeout(() => {
+      req.destroy();
+      resolve({ reachable: false, message: "Connection timed out" });
+    }, options.timeoutMs);
+    const req = https.request(reqOpts, (res) => {
+      clearTimeout(timeoutId);
+      resolve({
+        reachable: true,
+        message: res.statusCode === 200 ? "OK" : `HTTP ${res.statusCode}`,
+        statusCode: res.statusCode
+      });
+    });
+    req.on("error", (err) => {
+      clearTimeout(timeoutId);
+      resolve({ reachable: false, message: err.message || "Connection failed" });
+    });
+    req.end();
+  });
+}
+async function checkEndpointConnectivity(baseURL, options) {
+  const timeoutMs = options?.timeoutMs ?? DEFAULT_TIMEOUT_MS;
+  const useHttps = options?.resolveHost != null || options?.verifySSL === false || options?.host != null || options?.bypassAuth === true;
+  if (useHttps) {
+    const { url: url2, hostHeader } = resolveProbeRequest(baseURL, options);
+    return checkWithHttps(url2, hostHeader, {
+      timeoutMs,
+      verifySSL: options?.verifySSL === true
+    });
+  }
+  const url = probePath(baseURL);
+  const controller = new AbortController();
+  const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
+  try {
+    const res = await fetch(url, {
+      method: "GET",
+      signal: controller.signal,
+      headers: { Accept: "application/json" }
+    });
+    clearTimeout(timeoutId);
+    return {
+      reachable: true,
+      message: res.ok ? "OK" : `HTTP ${res.status}`,
+      statusCode: res.status
+    };
+  } catch (err) {
+    clearTimeout(timeoutId);
+    const message = err instanceof Error ? err.message : String(err);
+    const isTimeout = err instanceof Error && err.name === "AbortError";
+    return {
+      reachable: false,
+      message: isTimeout ? "Connection timed out" : message || "Connection failed"
+    };
+  }
+}
+var CIS_UNREACHABLE_REMINDER = "Please ensure you are connected to Secure VPN and try again.";
+function buildUnreachableError(endpointId, baseURL, detail) {
+  const parts = [
+    `Cannot connect to CIS (endpoint: ${endpointId}, base URL: ${baseURL}).`,
+    detail && ` ${detail}`,
+    ` ${CIS_UNREACHABLE_REMINDER}`
+  ];
+  return parts.filter(Boolean).join("").trim();
+}
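The bundled probePath above decides which URL the check actually probes. The snippet below is a standalone mirror of that logic (probePath itself is not exported), showing the derived probe target for a few illustrative base URLs.

```ts
// Mirror of the bundled probePath logic above, for illustration only.
function probeTarget(baseURL: string): string {
  const base = baseURL.replace(/\/+$/, "");
  if (base.endsWith("/v1")) return `${base}/models`;
  return base.includes("/v1") ? `${base}/models` : `${base}/v1/models`;
}

console.log(probeTarget("https://api.example.com/v1"));     // https://api.example.com/v1/models
console.log(probeTarget("https://api.example.com"));        // https://api.example.com/v1/models
console.log(probeTarget("https://gw.example.com/llm/v1/")); // https://gw.example.com/llm/v1/models
```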
+
+// src/model/llm/parser.ts
+var DEFAULT_LLM_ID = "default";
+var RESERVED_KEYS = /* @__PURE__ */ new Set([
+  "default",
+  "instances",
+  "catalog",
+  "provider",
+  "model",
+  "temperature",
+  "apiKey",
+  "baseURL",
+  "base_url",
+  "type",
+  "id"
+]);
+function parseLlmSection(section) {
+  if (section == null || typeof section !== "object") {
+    return { defaultId: DEFAULT_LLM_ID, configs: [] };
+  }
+  if (Array.isArray(section)) {
+    const configs = section.filter((i) => i != null && typeof i === "object").map((item, i) => normalizeLlmConfig({ ...item, id: item.id ?? item.name ?? String(i) })).filter((c) => c != null);
+    const defaultId = configs.length > 0 ? configs[0].id : DEFAULT_LLM_ID;
+    return { defaultId, configs };
+  }
+  const s = section;
+  const flatEntries = Object.entries(s).filter(
+    ([k, v]) => !RESERVED_KEYS.has(k) && v != null && typeof v === "object" && !Array.isArray(v)
+  );
+  if (flatEntries.length > 0) {
+    const configs = [];
+    for (const [id, entry] of flatEntries) {
+      const c = entryToLlmConfig(id, entry);
+      if (c) configs.push(c);
+    }
+    const defaultId = typeof s.default === "string" && s.default && flatEntries.some(([k]) => k === s.default) ? s.default : configs.length > 0 ? configs[0].id : DEFAULT_LLM_ID;
+    return { defaultId, configs };
+  }
+  if (Array.isArray(s.instances)) {
+    const configs = s.instances.filter((i) => i != null && typeof i === "object").map((i) => normalizeLlmConfig(i)).filter((c) => c != null);
+    const defaultId = typeof s.default === "string" && s.default ? s.default : configs.length > 0 ? configs[0].id : DEFAULT_LLM_ID;
+    return { defaultId, configs };
+  }
+  if (typeof s.provider === "string" || typeof s.model === "string" || typeof s.name === "string") {
+    const one = singleObjectToLlmConfig(s);
+    return { defaultId: one.id, configs: [one] };
+  }
+  return { defaultId: DEFAULT_LLM_ID, configs: [] };
+}
+var EXTENSION_OPTION_KEYS = ["featureKey", "tenant", "authToken", "verifySSL", "bypassAuth", "host", "resolveHost", "timeoutMs", "options"];
+function entryToLlmConfig(id, entry) {
+  const opts = entry.options;
+  const baseURL = typeof entry.base_url === "string" ? entry.base_url : typeof entry.baseURL === "string" ? entry.baseURL : void 0;
+  const model = typeof entry.name === "string" ? entry.name : typeof entry.model === "string" ? entry.model : void 0;
+  const provider = typeof entry.provider === "string" && entry.provider ? entry.provider : "openai";
+  const config = {
+    id,
+    type: "chat",
+    provider,
+    model,
+    temperature: typeof opts?.temperature === "number" ? opts.temperature : typeof entry.temperature === "number" ? entry.temperature : void 0,
+    apiKey: typeof opts?.apiKey === "string" ? opts.apiKey : typeof entry.apiKey === "string" ? entry.apiKey : void 0,
+    baseURL
+  };
+  if (typeof entry.type === "string" && entry.type === "image") config.type = "image";
+  if (opts && typeof opts === "object") config.options = opts;
+  for (const k of EXTENSION_OPTION_KEYS) {
+    if (entry[k] !== void 0) config[k] = entry[k];
+    else if (opts && opts[k] !== void 0) config[k] = opts[k];
+  }
+  return config;
+}
+function singleObjectToLlmConfig(s) {
+  const one = {
+    id: DEFAULT_LLM_ID,
+    type: "chat",
+    provider: typeof s.provider === "string" ? s.provider : "openai",
+    model: typeof s.model === "string" ? s.model : typeof s.name === "string" ? s.name : void 0,
+    temperature: typeof s.temperature === "number" ? s.temperature : void 0,
+    apiKey: typeof s.apiKey === "string" ? s.apiKey : void 0,
+    baseURL: typeof s.baseURL === "string" ? s.baseURL : typeof s.base_url === "string" ? s.base_url : void 0
+  };
+  Object.keys(s).forEach((k) => {
+    if (!["id", "type", "provider", "model", "name", "temperature", "apiKey", "baseURL", "base_url", "default", "instances"].includes(k)) {
+      one[k] = s[k];
+    }
+  });
+  return one;
+}
+function normalizeLlmConfig(o) {
+  const id = typeof o.id === "string" && o.id ? o.id : DEFAULT_LLM_ID;
+  const type = o.type === "image" ? "image" : "chat";
+  const provider = typeof o.provider === "string" && o.provider ? o.provider : "openai";
+  const opts = o.options;
+  const config = {
+    id,
+    type,
+    provider,
+    model: typeof o.model === "string" ? o.model : typeof o.name === "string" ? o.name : void 0,
+    temperature: typeof o.temperature === "number" ? o.temperature : typeof opts?.temperature === "number" ? opts.temperature : void 0,
+    apiKey: typeof o.apiKey === "string" ? o.apiKey : typeof opts?.apiKey === "string" ? opts.apiKey : void 0,
+    baseURL: typeof o.baseURL === "string" ? o.baseURL : typeof o.base_url === "string" ? o.base_url : void 0
+  };
+  Object.keys(o).forEach((k) => {
+    if (!["id", "type", "provider", "model", "name", "temperature", "apiKey", "baseURL", "base_url"].includes(k)) {
+      config[k] = o[k];
+    }
+  });
+  return config;
+}
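For context, parseLlmSection accepts the flat-keyed shape (model ids as top-level keys plus an optional default) as well as instances[] and single-object forms. A minimal sketch with made-up ids and URLs:

```ts
import { parseLlmSection } from "@easynet/agent-common";

// Hypothetical llm section, e.g. the parsed `llm:` block of an agent.yaml.
const llmSection = {
  default: "fast",
  fast: { provider: "openai", name: "gpt-4o-mini", base_url: "https://api.example.com/v1" },
  smart: { model: "gpt-4o", temperature: 0.2, options: { apiKey: "sk-..." } },
};

const { defaultId, configs } = parseLlmSection(llmSection);
// defaultId === "fast"; configs[0].model === "gpt-4o-mini"; configs[1].apiKey === "sk-..."
```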
+
+// src/model/embedding/index.ts
+var DEFAULT_EMBEDDING_TIMEOUT_MS = 3e4;
+function isLocalBaseUrl(url) {
+  try {
+    const u = new URL(url);
+    const host = u.hostname.toLowerCase();
+    return host === "localhost" || host === "127.0.0.1" || host === "::1";
+  } catch {
+    return false;
+  }
+}
+async function embedViaOpenAICompatibleApi(options, input) {
+  const baseUrl = options.baseURL.replace(/\/$/, "");
+  const apiKey = options.apiKey?.trim();
+  const timeoutMs = options.timeoutMs ?? DEFAULT_EMBEDDING_TIMEOUT_MS;
+  const modelName = typeof input.model === "string" && input.model.trim() !== "" ? input.model.trim() : options.model ?? "text-embedding-3-small";
+  if (!apiKey && !isLocalBaseUrl(baseUrl)) {
+    throw new Error("Embedding API key is required for non-local baseURL");
+  }
+  const controller = new AbortController();
+  const timer = setTimeout(() => controller.abort(), timeoutMs);
+  try {
+    const response = await fetch(`${baseUrl}/embeddings`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        ...apiKey ? { Authorization: `Bearer ${apiKey}` } : {}
+      },
+      body: JSON.stringify({ model: modelName, input: input.input }),
+      signal: controller.signal
+    });
+    if (!response.ok) {
+      const body = await response.text();
+      throw new Error(`Embedding API error ${response.status}: ${body.slice(0, 500)}`);
+    }
+    const data = await response.json();
+    const vectors = (data.data ?? []).slice().sort((a, b) => (a.index ?? 0) - (b.index ?? 0)).map((v) => v.embedding).filter((v) => Array.isArray(v));
+    return {
+      vectors,
+      dimensions: vectors[0]?.length,
+      model: data.model ?? modelName
+    };
+  } finally {
+    clearTimeout(timer);
+  }
+}
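A minimal usage sketch of the new embedding helper; the local base URL and model name are illustrative (a local OpenAI-compatible server is assumed, so no API key is needed):

```ts
import { embedViaOpenAICompatibleApi } from "@easynet/agent-common";

// Illustrative only: a local OpenAI-compatible server on port 11434.
const { vectors, dimensions, model } = await embedViaOpenAICompatibleApi(
  { baseURL: "http://localhost:11434/v1", timeoutMs: 10_000 },
  { input: ["hello world", "agent-common"], model: "nomic-embed-text" }
);
console.log(model, dimensions, vectors.length); // e.g. "nomic-embed-text", 768, 2
```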
+
+// src/model/chat/index.ts
+var DEFAULT_CHAT_TIMEOUT_MS = 6e4;
+function isLocalBaseUrl2(url) {
+  try {
+    const u = new URL(url);
+    const host = u.hostname.toLowerCase();
+    return host === "localhost" || host === "127.0.0.1" || host === "::1";
+  } catch {
+    return false;
+  }
+}
+function normalizeContent(content) {
+  if (typeof content === "string") return content;
+  if (!Array.isArray(content)) return String(content ?? "");
+  const parts = [];
+  for (const p of content) {
+    if (typeof p === "string") parts.push(p);
+    else if (p && typeof p === "object" && typeof p.text === "string")
+      parts.push(p.text);
+  }
+  return parts.join("\n").trim();
+}
+async function chatCompletionViaOpenAICompatibleApi(options, request) {
+  const baseUrl = options.baseURL.replace(/\/$/, "");
+  const apiKey = options.apiKey?.trim();
+  const timeoutMs = options.timeoutMs ?? DEFAULT_CHAT_TIMEOUT_MS;
+  const modelName = typeof request.model === "string" && request.model.trim() !== "" ? request.model.trim() : options.model ?? "gpt-4o-mini";
+  if (!apiKey && !isLocalBaseUrl2(baseUrl)) {
+    throw new Error("Chat completion API key is required for non-local baseURL");
+  }
+  const body = {
+    model: modelName,
+    messages: request.messages.map((m) => ({ role: m.role, content: m.content })),
+    temperature: typeof request.temperature === "number" ? request.temperature : 0
+  };
+  if (typeof request.maxTokens === "number") body.max_tokens = request.maxTokens;
+  const controller = new AbortController();
+  const timer = setTimeout(() => controller.abort(), timeoutMs);
+  try {
+    const response = await fetch(`${baseUrl}/chat/completions`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        ...apiKey ? { Authorization: `Bearer ${apiKey}` } : {}
+      },
+      body: JSON.stringify(body),
+      signal: controller.signal
+    });
+    if (!response.ok) {
+      const text = await response.text();
+      throw new Error(`Chat completion API error ${response.status}: ${text.slice(0, 500)}`);
+    }
+    const data = await response.json();
+    const raw = data.choices?.[0]?.message?.content ?? data.choices?.[0]?.text ?? "";
+    return {
+      text: normalizeContent(raw),
+      model: data.model ?? modelName
+    };
+  } finally {
+    clearTimeout(timer);
+  }
+}
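A minimal usage sketch of the chat helper; the base URL, key, and model are illustrative, and a missing key against a non-local base URL throws as shown above:

```ts
import { chatCompletionViaOpenAICompatibleApi } from "@easynet/agent-common";

// Illustrative values; any OpenAI-compatible /chat/completions endpoint works.
const { text, model } = await chatCompletionViaOpenAICompatibleApi(
  { baseURL: "https://api.openai.com/v1", apiKey: process.env.OPENAI_API_KEY, model: "gpt-4o-mini" },
  {
    messages: [
      { role: "system", content: "You are a terse assistant." },
      { role: "user", content: "Say hi." },
    ],
    temperature: 0,
    maxTokens: 32,
  }
);
console.log(model, text);
```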
+
+// src/model/hub/index.ts
+var DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small";
+var DEFAULT_CHAT_MODEL = "gpt-4o-mini";
+var DEFAULT_BASE_URL = "https://api.openai.com/v1";
+function getDefaultLlmConfig(llmSection) {
+  const parsed = parseLlmSection(llmSection ?? null);
+  const config = parsed.configs.find((c) => c.id === parsed.defaultId) ?? parsed.configs[0] ?? null;
+  return config;
+}
+function createModelHub(options = {}) {
+  const baseCfg = getDefaultLlmConfig(options.llmSection);
+  const defaultBaseUrl = (options.embeddingBaseURL ?? baseCfg?.baseURL ?? DEFAULT_BASE_URL).replace(/\/$/, "");
+  const defaultApiKey = options.embeddingApiKey ?? baseCfg?.apiKey ?? process.env.OPENAI_API_KEY;
+  const defaultEmbeddingModel = options.embeddingModel ?? (baseCfg?.options && typeof baseCfg.options.embeddingModel === "string" ? baseCfg.options.embeddingModel : void 0) ?? baseCfg?.model ?? process.env.OPENAI_EMBEDDING_MODEL ?? DEFAULT_EMBEDDING_MODEL;
+  const defaultChatModel = baseCfg?.model ?? process.env.OPENAI_MODEL ?? DEFAULT_CHAT_MODEL;
+  const embeddingTimeoutMs = options.embeddingTimeoutMs;
+  const chatTimeoutMs = options.chatTimeoutMs;
+  return {
+    async generate(input) {
+      const messages = [];
+      if (typeof input.systemPrompt === "string" && input.systemPrompt.trim() !== "") {
+        messages.push({ role: "system", content: input.systemPrompt.trim() });
+      }
+      messages.push({ role: "user", content: input.input });
+      const result = await chatCompletionViaOpenAICompatibleApi(
+        {
+          baseURL: defaultBaseUrl,
+          apiKey: defaultApiKey,
+          model: defaultChatModel,
+          timeoutMs: chatTimeoutMs
+        },
+        {
+          messages,
+          model: input.model,
+          temperature: input.temperature
+        }
+      );
+      return {
+        text: result.text,
+        model: typeof input.model === "string" && input.model.trim() !== "" ? input.model : result.model ?? baseCfg?.model
+      };
+    },
+    async embed(input) {
+      const result = await embedViaOpenAICompatibleApi(
+        {
+          baseURL: defaultBaseUrl,
+          apiKey: defaultApiKey,
+          model: defaultEmbeddingModel,
+          timeoutMs: embeddingTimeoutMs
+        },
+        { input: input.input, model: input.model }
+      );
+      return {
+        vectors: result.vectors,
+        dimensions: result.dimensions,
+        model: result.model
+      };
+    }
+  };
+}
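A sketch of wiring the hub from a flat-keyed llm section (hypothetical values); generate() and embed() reuse the default config resolved by parseLlmSection:

```ts
import { createModelHub } from "@easynet/agent-common";

// Hypothetical llm section; in practice this would come from a loaded agent.yaml.
const hub = createModelHub({
  llmSection: {
    default: "main",
    main: { provider: "openai", model: "gpt-4o-mini", baseURL: "https://api.openai.com/v1", apiKey: process.env.OPENAI_API_KEY },
  },
  embeddingModel: "text-embedding-3-small",
});

const { text } = await hub.generate({ input: "Summarize agent-common in one line.", systemPrompt: "Be brief." });
const { vectors } = await hub.embed({ input: "agent-common" });
console.log(text, vectors[0]?.length);
```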
+
+// src/utils/deep-merge.ts
+function deepMerge(target, ...sources) {
+  for (const src of sources) {
+    if (!src || typeof src !== "object") continue;
+    for (const k of Object.keys(src)) {
+      const v = src[k];
+      if (v === void 0) continue;
+      const t = target[k];
+      if (v !== null && typeof v === "object" && !Array.isArray(v) && t !== null && typeof t === "object" && !Array.isArray(t)) {
+        deepMerge(t, v);
+      } else {
+        target[k] = v;
+      }
+    }
+  }
+  return target;
+}
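A small sketch of the merge semantics: later sources win, plain objects merge recursively, and undefined values are skipped (the config shape is illustrative):

```ts
import { deepMerge } from "@easynet/agent-common";

type AgentConfig = { llm?: { provider?: string; temperature?: number }; retries?: number };

const base: AgentConfig = { llm: { provider: "openai", temperature: 0 }, retries: 1 };
const override: AgentConfig = { llm: { temperature: 0.2 }, retries: undefined };

const merged = deepMerge(base, override);
// merged = { llm: { provider: "openai", temperature: 0.2 }, retries: 1 }
```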
 export {
+  CIS_UNREACHABLE_REMINDER,
   NPM_PROTOCOL_PREFIX,
   assertNpmCommandSuccess,
+  buildUnreachableError,
+  chatCompletionViaOpenAICompatibleApi,
+  checkEndpointConnectivity,
   clearVersionCache,
   clearYamlFileCache,
+  createModelHub,
+  deepMerge,
+  embedViaOpenAICompatibleApi,
   ensureNpmPackageInstalled,
   ensurePackageInCache,
   expandHomePath,
@@ -141,6 +546,7 @@ export {
   npmRoot,
   npmRunScript,
   npmSearchJson,
+  parseLlmSection,
   parseNpmProvider,
   parseNpmProviderSpec,
   parseYamlContent,
package/dist/index.js.map CHANGED
@@ -1 +1 @@
[Single-line source map regenerated. The new map's "sources"/"sourcesContent" add ../src/connectivity/check.ts, ../src/model/llm/parser.ts, ../src/model/embedding/index.ts, ../src/model/chat/index.ts, ../src/model/hub/index.ts, and ../src/utils/deep-merge.ts alongside the existing ../src/config/yaml.ts and ../src/config/path.ts; the minified mappings are not reproduced here.]
Z;AACR,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,MAAI,MAAM,QAAQ,EAAE,SAAS,GAAG;AAC9B,UAAM,UAAW,EAAE,UAChB,OAAO,CAAC,MAAoC,KAAK,QAAQ,OAAO,MAAM,QAAQ,EAC9E,IAAI,CAAC,MAAM,mBAAmB,CAAC,CAAC,EAChC,OAAO,CAAC,MAAsB,KAAK,IAAI;AAC1C,UAAM,YACJ,OAAO,EAAE,YAAY,YAAY,EAAE,UAC/B,EAAE,UACF,QAAQ,SAAS,IACf,QAAQ,CAAC,EAAG,KACZ;AACR,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,MAAI,OAAO,EAAE,aAAa,YAAY,OAAO,EAAE,UAAU,YAAY,OAAQ,EAAwB,SAAS,UAAU;AACtH,UAAM,MAAM,wBAAwB,CAAC;AACrC,WAAO,EAAE,WAAW,IAAI,IAAI,SAAS,CAAC,GAAG,EAAE;AAAA,EAC7C;AAEA,SAAO,EAAE,WAAW,gBAAgB,SAAS,CAAC,EAAE;AAClD;AAEA,IAAM,wBAAwB,CAAC,cAAc,UAAU,aAAa,aAAa,cAAc,QAAQ,eAAe,aAAa,SAAS;AAE5I,SAAS,iBAAiB,IAAY,OAAkD;AACtF,QAAM,OAAO,MAAM;AACnB,QAAM,UACJ,OAAO,MAAM,aAAa,WACtB,MAAM,WACN,OAAO,MAAM,YAAY,WACvB,MAAM,UACN;AACR,QAAM,QAAQ,OAAO,MAAM,SAAS,WAAW,MAAM,OAAO,OAAO,MAAM,UAAU,WAAW,MAAM,QAAQ;AAC5G,QAAM,WAAW,OAAO,MAAM,aAAa,YAAY,MAAM,WAAW,MAAM,WAAW;AACzF,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,aAAa,OAAO,MAAM,gBAAgB,WAAW,KAAK,cAAc,OAAO,MAAM,gBAAgB,WAAW,MAAM,cAAc;AAAA,IACpI,QAAQ,OAAO,MAAM,WAAW,WAAW,KAAK,SAAS,OAAO,MAAM,WAAW,WAAW,MAAM,SAAS;AAAA,IAC3G;AAAA,EACF;AACA,MAAI,OAAO,MAAM,SAAS,YAAY,MAAM,SAAS,QAAS,QAAO,OAAO;AAC5E,MAAI,QAAQ,OAAO,SAAS,SAAU,CAAC,OAAmC,UAAU;AACpF,aAAW,KAAK,uBAAuB;AACrC,QAAI,MAAM,CAAC,MAAM,OAAW,CAAC,OAAmC,CAAC,IAAI,MAAM,CAAC;AAAA,aACnE,QAAQ,KAAK,CAAC,MAAM,OAAW,CAAC,OAAmC,CAAC,IAAI,KAAK,CAAC;AAAA,EACzF;AACA,SAAO;AACT;AAEA,SAAS,wBAAwB,GAAuC;AACtE,QAAM,MAAiB;AAAA,IACrB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,UAAU,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,IACxD,OAAO,OAAO,EAAE,UAAU,WAAW,EAAE,QAAS,OAAQ,EAAwB,SAAS,WAAY,EAAuB,OAAO;AAAA,IACnI,aAAa,OAAO,EAAE,gBAAgB,WAAW,EAAE,cAAc;AAAA,IACjE,QAAQ,OAAO,EAAE,WAAW,WAAW,EAAE,SAAS;AAAA,IAClD,SACE,OAAO,EAAE,YAAY,WAAW,EAAE,UAAU,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,EAC9F;AACA,SAAO,KAAK,CAAC,EAAE,QAAQ,CAAC,MAAM;AAC5B,QAAI,CAAC,CAAC,MAAM,QAAQ,YAAY,SAAS,QAAQ,eAAe,UAAU,WAAW,YAAY,WAAW,WAAW,EAAE,SAAS,CAAC,GAAG;AACpI,MAAC,IAAgC,CAAC,IAAI,EAAE,CAAC;AAAA,IAC3C;AAAA,EACF,CAAC;AACD,SAAO;AACT;AAEA,SAAS,mBAAmB,GAA8C;AACxE,QAAM,KAAK,OAAO,EAAE,OAAO,YAAY,EAAE,KAAK,EAAE,KAAK;AACrD,QAAM,OAAO,EAAE,SAAS,UAAU,UAAU;AAC5C,QAAM,WAAW,OAAO,EAAE,aAAa,YAAY,EAAE,WAAW,EAAE,WAAW;AAC7E,QAAM,OAAO,EAAE;AACf,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,OAAO,EAAE,UAAU,WAAW,EAAE,QAAS,OAAO,EAAE,SAAS,WAAW,EAAE,OAAO;AAAA,IACtF,aACE,OAAO,EAAE,gBAAgB,WACrB,EAAE,cACF,OAAO,MAAM,gBAAgB,WAC3B,KAAK,cACL;AAAA,IACR,QACE,OAAO,EAAE,WAAW,WAChB,EAAE,SACF,OAAO,MAAM,WAAW,WACtB,KAAK,SACL;AAAA,IACR,SAAS,OAAO,EAAE,YAAY,WAAW,EAAE,UAAW,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,EACtG;AACA,SAAO,KAAK,CAAC,EAAE,QAAQ,CAAC,MAAM;AAC5B,QAAI,CAAC,CAAC,MAAM,QAAQ,YAAY,SAAS,QAAQ,eAAe,UAAU,WAAW,UAAU,EAAE,SAAS,CAAC,GAAG;AAC5G,MAAC,OAAmC,CAAC,IAAI,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF,CAAC;AACD,SAAO;AACT;;;ACzJA,IAAM,+BAA+B;AAoBrC,SAAS,eAAe,KAAsB;AAC5C,MAAI;AACF,UAAM,IAAI,IAAI,IAAI,GAAG;AACrB,UAAM,OAAO,EAAE,SAAS,YAAY;AACpC,WAAO,SAAS,eAAe,SAAS,eAAe,SAAS;AAAA,EAClE,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAMA,eAAsB,4BACpB,SACA,OACsB;AACtB,QAAM,UAAU,QAAQ,QAAQ,QAAQ,OAAO,EAAE;AACjD,QAAM,SAAS,QAAQ,QAAQ,KAAK;AACpC,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,YACJ,OAAO,MAAM,UAAU,YAAY,MAAM,MAAM,KAAK,MAAM,KACtD,MAAM,MAAM,KAAK,IACjB,QAAQ,SAAS;AAEvB,MAAI,CAAC,UAAU,CAAC,eAAe,OAAO,GAAG;AACvC,UAAM,IAAI,MAAM,qDAAqD;AAAA,EACvE;AAEA,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,SAAS;AAC5D,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,GAAG,OAAO,eAAe;AAAA,MACpD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAI,SAAS,EAAE,eAAe,UAAU,MAAM,GAAG,IAAI,CAAC;AAAA,MACxD;AAAA,MACA,MAAM,KAAK,UAAU,EAAE,OAAO,WAAW,OAAO,MAAM,MAAM,CAAC;AAAA,MAC7D,QAAQ,WAA
W;AAAA,IACrB,CAAC;AACD,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,YAAM,IAAI,MAAM,uBAAuB,SAAS,MAAM,KAAK,KAAK,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,IACjF;AACA,UAAM,OAAQ,MAAM,SAAS,KAAK;AAIlC,UAAM,WAAW,KAAK,QAAQ,CAAC,GAC5B,MAAM,EACN,KAAK,CAAC,GAAG,OAAO,EAAE,SAAS,MAAM,EAAE,SAAS,EAAE,EAC9C,IAAI,CAAC,MAAM,EAAE,SAAS,EACtB,OAAO,CAAC,MAAqB,MAAM,QAAQ,CAAC,CAAC;AAChD,WAAO;AAAA,MACL;AAAA,MACA,YAAY,QAAQ,CAAC,GAAG;AAAA,MACxB,OAAO,KAAK,SAAS;AAAA,IACvB;AAAA,EACF,UAAE;AACA,iBAAa,KAAK;AAAA,EACpB;AACF;;;ACnFA,IAAM,0BAA0B;AA0BhC,SAASC,gBAAe,KAAsB;AAC5C,MAAI;AACF,UAAM,IAAI,IAAI,IAAI,GAAG;AACrB,UAAM,OAAO,EAAE,SAAS,YAAY;AACpC,WAAO,SAAS,eAAe,SAAS,eAAe,SAAS;AAAA,EAClE,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,iBAAiB,SAA0B;AAClD,MAAI,OAAO,YAAY,SAAU,QAAO;AACxC,MAAI,CAAC,MAAM,QAAQ,OAAO,EAAG,QAAO,OAAO,WAAW,EAAE;AACxD,QAAM,QAAkB,CAAC;AACzB,aAAW,KAAK,SAAS;AACvB,QAAI,OAAO,MAAM,SAAU,OAAM,KAAK,CAAC;AAAA,aAC9B,KAAK,OAAO,MAAM,YAAY,OAAQ,EAAyB,SAAS;AAC/E,YAAM,KAAM,EAAuB,IAAI;AAAA,EAC3C;AACA,SAAO,MAAM,KAAK,IAAI,EAAE,KAAK;AAC/B;AAMA,eAAsB,qCACpB,SACA,SAC+B;AAC/B,QAAM,UAAU,QAAQ,QAAQ,QAAQ,OAAO,EAAE;AACjD,QAAM,SAAS,QAAQ,QAAQ,KAAK;AACpC,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,YACJ,OAAO,QAAQ,UAAU,YAAY,QAAQ,MAAM,KAAK,MAAM,KAC1D,QAAQ,MAAM,KAAK,IACnB,QAAQ,SAAS;AAEvB,MAAI,CAAC,UAAU,CAACA,gBAAe,OAAO,GAAG;AACvC,UAAM,IAAI,MAAM,2DAA2D;AAAA,EAC7E;AAEA,QAAM,OAAgC;AAAA,IACpC,OAAO;AAAA,IACP,UAAU,QAAQ,SAAS,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,EAAE;AAAA,IAC5E,aAAa,OAAO,QAAQ,gBAAgB,WAAW,QAAQ,cAAc;AAAA,EAC/E;AACA,MAAI,OAAO,QAAQ,cAAc,SAAU,MAAK,aAAa,QAAQ;AAErE,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,SAAS;AAC5D,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,GAAG,OAAO,qBAAqB;AAAA,MAC1D,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAI,SAAS,EAAE,eAAe,UAAU,MAAM,GAAG,IAAI,CAAC;AAAA,MACxD;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,MACzB,QAAQ,WAAW;AAAA,IACrB,CAAC;AACD,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,YAAM,IAAI,MAAM,6BAA6B,SAAS,MAAM,KAAK,KAAK,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,IACvF;AACA,UAAM,OAAQ,MAAM,SAAS,KAAK;AAIlC,UAAM,MACJ,KAAK,UAAU,CAAC,GAAG,SAAS,WAAW,KAAK,UAAU,CAAC,GAAG,QAAQ;AACpE,WAAO;AAAA,MACL,MAAM,iBAAiB,GAAG;AAAA,MAC1B,OAAO,KAAK,SAAS;AAAA,IACvB;AAAA,EACF,UAAE;AACA,iBAAa,KAAK;AAAA,EACpB;AACF;;;AClGA,IAAM,0BAA0B;AAChC,IAAM,qBAAqB;AAC3B,IAAM,mBAAmB;AA4BzB,SAAS,oBAAoB,YAAuC;AAClE,QAAM,SAAS,gBAAgB,cAAc,IAAI;AACjD,QAAM,SACJ,OAAO,QAAQ,KAAK,CAAC,MAAiB,EAAE,OAAO,OAAO,SAAS,KAC/D,OAAO,QAAQ,CAAC,KAChB;AACF,SAAO;AACT;AAEO,SAAS,eAAe,UAAiC,CAAC,GAAa;AAC5E,QAAM,UAAU,oBAAoB,QAAQ,UAAU;AACtD,QAAM,kBACJ,QAAQ,oBACR,SAAS,WACT,kBACA,QAAQ,OAAO,EAAE;AACnB,QAAM,gBACJ,QAAQ,mBACR,SAAS,UACT,QAAQ,IAAI;AACd,QAAM,wBACJ,QAAQ,mBACP,SAAS,WAAW,OAAO,QAAQ,QAAQ,mBAAmB,WAC3D,QAAQ,QAAQ,iBAChB,WACJ,SAAS,SACT,QAAQ,IAAI,0BACZ;AACF,QAAM,mBACJ,SAAS,SACT,QAAQ,IAAI,gBACZ;AACF,QAAM,qBAAqB,QAAQ;AACnC,QAAM,gBAAgB,QAAQ;AAE9B,SAAO;AAAA,IACL,MAAM,SAAS,OAAyD;AACtE,YAAM,WAA8E,CAAC;AACrF,UAAI,OAAO,MAAM,iBAAiB,YAAY,MAAM,aAAa,KAAK,MAAM,IAAI;AAC9E,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,MAAM,aAAa,KAAK,EAAE,CAAC;AAAA,MACtE;AACA,eAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,MAAM,MAAM,CAAC;AACpD,YAAM,SAAS,MAAM;AAAA,QACnB;AAAA,UACE,SAAS;AAAA,UACT,QAAQ;AAAA,UACR,OAAO;AAAA,UACP,WAAW;AAAA,QACb;AAAA,QACA;AAAA,UACE;AAAA,UACA,OAAO,MAAM;AAAA,UACb,aAAa,MAAM;AAAA,QACrB;AAAA,MACF;AACA,aAAO;AAAA,QACL,MAAM,OAAO;AAAA,QACb,OACE,OAAO,MAAM,UAAU,YAAY,MAAM,MAAM,KAAK,MAAM,KACtD,MAAM,QACN,OAAO,SAAS,SAAS;AAAA,MACjC;AAAA,IACF;AAAA,IACA,MAAM,MAAM,OAA2C;AACrD,YAAM,SAAS,MAAM;AAAA,QACnB;AAAA,UACE,SAAS;AAAA,UACT,QAAQ;AAAA,UACR,OAAO;AAAA,UACP,WAAW;AAAA,QACb;AAAA,QACA,EAAE,OAAO,MAAM,OAAO,OAAO,MAAM,MAAM;AAAA,MAC3C;AACA,aAAO;AAAA,QACL,SAAS,
OAAO;AAAA,QAChB,YAAY,OAAO;AAAA,QACnB,OAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF;;;ACrHO,SAAS,UAA4B,WAAc,SAA0B;AAClF,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,OAAO,OAAO,QAAQ,SAAU;AACrC,eAAW,KAAK,OAAO,KAAK,GAAG,GAAkB;AAC/C,YAAM,IAAI,IAAI,CAAC;AACf,UAAI,MAAM,OAAW;AACrB,YAAM,IAAK,OAAoC,CAAC;AAChD,UACE,MAAM,QACN,OAAO,MAAM,YACb,CAAC,MAAM,QAAQ,CAAC,KAChB,MAAM,QACN,OAAO,MAAM,YACb,CAAC,MAAM,QAAQ,CAAC,GAChB;AACA,kBAAU,GAAa,CAAW;AAAA,MACpC,OAAO;AACL,QAAC,OAAoC,CAAC,IAAI;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;","names":["path","path","url","isLocalBaseUrl"]}

@@ -0,0 +1,29 @@
+/**
+ * OpenAI-compatible chat completion API: POST /chat/completions.
+ * No LangChain dependency; for use by any module that needs LLM text generation.
+ */
+export interface ChatCompletionOptions {
+    baseURL: string;
+    apiKey?: string;
+    model?: string;
+    timeoutMs?: number;
+}
+export interface ChatCompletionMessage {
+    role: "system" | "user" | "assistant";
+    content: string;
+}
+export interface ChatCompletionRequest {
+    messages: ChatCompletionMessage[];
+    model?: string;
+    temperature?: number;
+    maxTokens?: number;
+}
+export interface ChatCompletionResult {
+    text: string;
+    model?: string;
+}
+/**
+ * Call OpenAI-compatible /chat/completions endpoint.
+ * baseURL should be the API root (e.g. https://api.openai.com/v1); trailing slash is stripped.
+ */
+export declare function chatCompletionViaOpenAICompatibleApi(options: ChatCompletionOptions, request: ChatCompletionRequest): Promise<ChatCompletionResult>;
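
The hunk above adds only the type surface for the chat helper. As a rough usage sketch (not part of the package diff), a call could look like the following; the import specifier, endpoint URL, model name, and key are placeholders, assuming the helper is re-exported from the package's root entry as the bundled dist/index.js above suggests.

// Usage sketch only -- import path, endpoint, model name, and key are illustrative placeholders.
import { chatCompletionViaOpenAICompatibleApi } from "@easynet/agent-common";

async function summarize(text: string): Promise<string> {
  const result = await chatCompletionViaOpenAICompatibleApi(
    { baseURL: "https://api.openai.com/v1", apiKey: "<key>", model: "gpt-4o-mini", timeoutMs: 30_000 },
    {
      messages: [
        { role: "system", content: "You are a concise assistant." },
        { role: "user", content: `Summarize: ${text}` },
      ],
      temperature: 0.2,
      maxTokens: 256,
    }
  );
  // ChatCompletionResult carries the generated text plus, when reported, the model id.
  return result.text;
}
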
@@ -0,0 +1,24 @@
+/**
+ * OpenAI-compatible embedding API: POST /embeddings.
+ * No LangChain dependency; for use by any module that needs embeddings.
+ */
+export interface EmbeddingOptions {
+    baseURL: string;
+    apiKey?: string;
+    model?: string;
+    timeoutMs?: number;
+}
+export interface EmbedRequest {
+    input: string | string[];
+    model?: string;
+}
+export interface EmbedResult {
+    vectors: number[][];
+    dimensions?: number;
+    model?: string;
+}
+/**
+ * Call OpenAI-compatible /embeddings endpoint.
+ * baseURL should be the API root (e.g. https://api.openai.com/v1); trailing slash is stripped.
+ */
+export declare function embedViaOpenAICompatibleApi(options: EmbeddingOptions, input: EmbedRequest): Promise<EmbedResult>;
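
The embedding helper declared above follows the same pattern. The sketch below is illustrative only (assumed import path, placeholder endpoint, model, and key; assumes an ESM context for top-level await) and shows that passing an array yields one vector per input string.

// Usage sketch only -- all concrete values are placeholders.
import { embedViaOpenAICompatibleApi } from "@easynet/agent-common";

const { vectors, dimensions, model } = await embedViaOpenAICompatibleApi(
  { baseURL: "https://api.openai.com/v1", apiKey: "<key>", model: "text-embedding-3-small" },
  { input: ["first passage", "second passage"] }
);
// vectors: number[][], one embedding per input string; dimensions and model are optional extras.
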
@@ -0,0 +1,28 @@
+/**
+ * Model hub: generate (chat completion) + embed, driven by llm section config.
+ * No LangChain; uses OpenAI-compatible HTTP APIs so any package can use it via agent-common.
+ */
+import type { EmbedRequest, EmbedResult } from "../embedding/index.js";
+export interface CreateModelHubOptions {
+    llmSection?: unknown;
+    embeddingBaseURL?: string;
+    embeddingApiKey?: string;
+    embeddingModel?: string;
+    embeddingTimeoutMs?: number;
+    chatTimeoutMs?: number;
+}
+export interface ChatGenerateRequest {
+    input: string;
+    systemPrompt?: string;
+    model?: string;
+    temperature?: number;
+}
+export interface ChatGenerateResult {
+    text: string;
+    model?: string;
+}
+export interface ModelHub {
+    generate(input: ChatGenerateRequest): Promise<ChatGenerateResult>;
+    embed(input: EmbedRequest): Promise<EmbedResult>;
+}
+export declare function createModelHub(options?: CreateModelHubOptions): ModelHub;
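
The hub wraps the chat and embedding helpers behind generate/embed, configured from an llm section. A hedged wiring sketch (not part of the diff), using the instances[] form that AgentConfigLlmSection declares further below; every concrete value is a placeholder.

// Usage sketch only -- llm section contents, model names, and credentials are placeholders.
import { createModelHub } from "@easynet/agent-common";

const hub = createModelHub({
  llmSection: {
    default: "main",
    instances: [
      { id: "main", type: "chat", provider: "openai", model: "gpt-4o-mini", baseURL: "https://api.openai.com/v1", apiKey: "<key>" },
    ],
  },
  embeddingModel: "text-embedding-3-small",
  chatTimeoutMs: 30_000,
});

const reply = await hub.generate({ input: "Ping?", systemPrompt: "Answer briefly." });
const { vectors } = await hub.embed({ input: "index me" });
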
@@ -0,0 +1,8 @@
+/**
+ * Model-related APIs: LLM config, embedding, chat completion, model hub.
+ * All OpenAI-compatible, no LangChain; any package can use via agent-common.
+ */
+export * from "./llm/index.js";
+export * from "./embedding/index.js";
+export * from "./chat/index.js";
+export { createModelHub, type CreateModelHubOptions, type ModelHub, type ChatGenerateRequest, type ChatGenerateResult, } from "./hub/index.js";
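
Because this barrel re-exports the llm, embedding, chat, and hub modules, consumers should be able to pull the whole model surface from one place. Whether parseLlmSection and the config types come along depends on llm/index.js, which is not shown in this excerpt, so treat the sketch below as an assumption.

// Sketch: single import surface; the specifier and the llm re-exports are assumptions.
import {
  createModelHub,
  chatCompletionViaOpenAICompatibleApi,
  embedViaOpenAICompatibleApi,
  parseLlmSection,
  type LLMConfig,
} from "@easynet/agent-common";
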
@@ -0,0 +1,9 @@
+/**
+ * Parse agent.yaml llm section into normalized LLMConfig[] and default id.
+ * Supports: flat (each model keyed by name), instances[], or single object.
+ */
+import type { LLMConfig } from "./types.js";
+export declare function parseLlmSection(section: unknown): {
+    defaultId: string;
+    configs: LLMConfig[];
+};
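
Only the return shape is fixed by the declaration above; the normalization itself lives in the bundled index.js. A hedged sketch using the instances[] input form:

// Sketch only -- input values are placeholders; the output shape follows the declared return type.
import { parseLlmSection } from "@easynet/agent-common";

const { defaultId, configs } = parseLlmSection({
  default: "fast",
  instances: [
    { id: "fast", type: "chat", provider: "openai", model: "gpt-4o-mini" },
    { id: "smart", type: "chat", provider: "openai", model: "gpt-4o" },
  ],
});
// Presumably defaultId === "fast" and configs holds both entries as normalized LLMConfig objects.
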
@@ -0,0 +1,30 @@
+/**
+ * LLM config types (no LangChain dependency).
+ * Used by any module that needs to read/parse llm section (e.g. agent-llm, agent-memory).
+ */
+export type LLMType = "chat" | "image";
+export interface LLMConfig {
+    id: string;
+    type: LLMType;
+    provider: string;
+    model?: string;
+    temperature?: number;
+    apiKey?: string;
+    baseURL?: string;
+    options?: Record<string, unknown>;
+    [key: string]: unknown;
+}
+export interface AgentConfigLlmSection {
+    default?: string;
+    type?: string | string[];
+    instances?: LLMConfig[];
+    provider?: string;
+    model?: string;
+    name?: string;
+    temperature?: number;
+    apiKey?: string;
+    baseURL?: string;
+    base_url?: string;
+    options?: Record<string, unknown>;
+    [key: string]: unknown;
+}
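
For reference, a literal satisfying these types might look like the following; values are placeholders, and the index signatures are what allow extra provider-specific keys.

// Sketch only -- placeholder values; assumes the types are re-exported from the package entry.
import type { LLMConfig, AgentConfigLlmSection } from "@easynet/agent-common";

const primary: LLMConfig = {
  id: "primary",
  type: "chat",
  provider: "openai",
  model: "gpt-4o-mini",
  temperature: 0.2,
  baseURL: "https://api.openai.com/v1",
  options: { maxRetries: 2 },
};

const section: AgentConfigLlmSection = { default: "primary", instances: [primary] };
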
@@ -0,0 +1 @@
+export { deepMerge } from "./deep-merge.js";
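
The deep-merge source embedded in the source map above documents the behavior: the target is mutated and returned, undefined source values are skipped, plain nested objects merge recursively, and arrays are simply overwritten. A small sketch (import specifier assumed):

// Sketch only -- assumes deepMerge is reachable from the package entry point.
import { deepMerge } from "@easynet/agent-common";

interface HttpOptions {
  timeoutMs?: number;
  verifySSL?: boolean;
  retry?: { attempts?: number; backoffMs?: number };
}

const base: HttpOptions = { timeoutMs: 8000, retry: { attempts: 3, backoffMs: 200 } };
const override: Partial<HttpOptions> = { retry: { attempts: 5 }, verifySSL: undefined };

const merged = deepMerge(base, override);
// merged === base (mutated in place); merged.retry is { attempts: 5, backoffMs: 200 },
// and verifySSL stays unset because undefined source values are ignored.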