@easynet/agent-llm 1.0.4 → 1.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -15,14 +15,14 @@ Config is read from **llm.yaml** or **config/llm.yaml** in the current directory
15
15
 
16
16
  ## Use in a LangChain agent
17
17
 
18
- **1.** Get the LLM from config with `createAgentLlM()`:
18
+ **1.** Get the LLM from config with `createAgentLlM()` (async; resolves npm: providers):
19
19
 
20
20
  ```ts
21
21
  import { createAgentLlM } from "@easynet/agent-llm";
22
22
  import { createAgent, tool } from "langchain";
23
23
  import { z } from "zod";
24
24
 
25
- const llm = createAgentLlM();
25
+ const llm = await createAgentLlM();
26
26
  ```
27
27
 
28
28
  **2.** Create your agent with LangChain’s `createAgent` and your tools:
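The step 2 code block itself is unchanged and not shown in this hunk. For context, a minimal end-to-end sketch combining steps 1 and 2; the `createAgent`/`tool` option names (`model`, `tools`, `name`, `description`, `schema`) follow common LangChain usage and are assumptions here, not part of this diff:

```ts
import { createAgentLlM } from "@easynet/agent-llm";
import { createAgent, tool } from "langchain";
import { z } from "zod";

// Step 1: createAgentLlM() is async as of 1.0.6 (it may resolve npm: providers).
const llm = await createAgentLlM();

// Step 2: one trivial tool; option names are assumptions, check the LangChain docs.
const echo = tool(async ({ text }) => text, {
  name: "echo",
  description: "Echo the input text back",
  schema: z.object({ text: z.string() }),
});

const agent = createAgent({ model: llm, tools: [echo] });
const result = await agent.invoke({
  messages: [{ role: "user", content: "Say hello via the echo tool" }],
});
console.log(result);
```

Note that the only change this release requires in existing agent code is the added `await` in step 1.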
@@ -59,7 +59,7 @@ apiKey: ${OPENAI_API_KEY}
59
59
  # baseURL: https://api.openai.com/v1 # or Ollama, Groq, etc.
60
60
  ```
61
61
 
62
- Optional: pass a path when calling `createAgentLlM({ configPath: "/path/to/llm.yaml" })`.
62
+ Optional: pass a path with `createAgentLlM("path/to/llm.yaml")` or `createAgentLlM({ configPath: "/path/to/llm.yaml" })`.
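A short sketch of the two equivalent call forms described above (file paths are illustrative):

```ts
import { createAgentLlM } from "@easynet/agent-llm";

// Default lookup: llm.yaml in the current directory, then config/llm.yaml.
const llm = await createAgentLlM();

// Explicit path, as a plain string or via the options object; both are equivalent.
const fromPath = await createAgentLlM("config/llm.yaml");
const fromOptions = await createAgentLlM({ configPath: "config/llm.yaml" });
```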
63
63
 
64
64
  ## npm: protocol in provider (install on demand)
65
65
 
@@ -72,16 +72,17 @@ You can set the provider by **npm package name** (and optional version) in confi
72
72
  - **`provider: "npm:wallee-llm#cis"`** – load wallee-llm and use the provider named `cis`.
73
73
  - **`provider: "npm:wallee-llm"`** – load wallee-llm and use its default provider (e.g. `cis`).
74
74
 
75
- Use **createChatModelFromLlmConfigWithNpm** or **createAgentLlMAsync** so npm: providers are resolved (and optionally installed) before creating the model:
75
+ **createAgentLlM()** resolves npm: providers (and optionally installs packages) when reading config. For a raw llm section, use **createChatModelFromLlmConfigWithNpm**:
76
76
 
77
77
  ```ts
78
- import { createChatModelFromLlmConfigWithNpm, createAgentLlMAsync } from "@easynet/agent-llm";
78
+ import { createChatModelFromLlmConfigWithNpm, createAgentLlM } from "@easynet/agent-llm";
79
79
 
80
80
  // From a raw llm section (e.g. from loadLlmConfig)
81
81
  const model = await createChatModelFromLlmConfigWithNpm({ llmSection });
82
82
 
83
83
  // From config file (llm.yaml / config/llm.yaml)
84
- const llm = await createAgentLlMAsync();
84
+ const llm = await createAgentLlM();
85
+ // Or with a path: createAgentLlM("path/to/llm.yaml")
85
86
  ```
86
87
 
87
- Options: **installNpmIfMissing** (default `true`) and **cwd** (default `process.cwd()` for npm install). Exports: `parseNpmProviderSpec`, `ensureNpmPackageInstalled`, `resolveNpmProvider`, `resolveLlmSectionWithNpm`, `isNpmProviderSpec`, `createChatModelFromLlmConfigWithNpm`, `createAgentLlMAsync`.
88
+ Options: **installNpmIfMissing** (default `true`) and **cwd** (working directory for `npm install`; default `process.cwd()`). Exports: `parseNpmProviderSpec`, `ensureNpmPackageInstalled`, `resolveNpmProvider`, `resolveLlmSectionWithNpm`, `isNpmProviderSpec`, `createChatModelFromLlmConfigWithNpm`, `createAgentLlM`.
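For reference, a sketch of the raw-section path using the two options listed above; the `npm:` spec format comes from this section, while the model name is a placeholder:

```ts
import { createChatModelFromLlmConfigWithNpm } from "@easynet/agent-llm";

// Raw llm section using the npm: protocol.
const llmSection = {
  provider: "npm:wallee-llm#cis", // load wallee-llm and use its provider named "cis"
  model: "some-model",            // placeholder
};

const model = await createChatModelFromLlmConfigWithNpm({
  llmSection,
  installNpmIfMissing: true, // default: run `npm install wallee-llm` if it is not found
  cwd: process.cwd(),        // default: directory where `npm install` runs
});
```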
@@ -133,13 +133,23 @@ function parseLlmYaml(content, options = {}) {
133
133
  return doSub ? substituteEnv(llm) : llm;
134
134
  }
135
135
  function loadLlmConfig(filePath, options = {}) {
136
+ if (typeof filePath !== "string" || filePath.trim().length === 0) {
137
+ throw new Error("agent-llm: loadLlmConfig requires a non-empty file path");
138
+ }
136
139
  if (!existsSync(filePath)) return null;
140
+ let raw;
141
+ try {
142
+ raw = readFileSync(filePath, "utf8");
143
+ } catch (e) {
144
+ const msg = e instanceof Error ? e.message : String(e);
145
+ throw new Error(`agent-llm: failed to read config file ${filePath}: ${msg}`, { cause: e });
146
+ }
137
147
  try {
138
- const raw = readFileSync(filePath, "utf8");
139
148
  const llm = parseLlmYaml(raw, options);
140
149
  return llm ?? null;
141
- } catch {
142
- return null;
150
+ } catch (e) {
151
+ const msg = e instanceof Error ? e.message : String(e);
152
+ throw new Error(`agent-llm: failed to parse config file ${filePath}: ${msg}`, { cause: e });
143
153
  }
144
154
  }
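The change above means a missing file still returns `null`, while read and parse failures now throw prefixed errors with the original error attached as `cause` instead of being swallowed. A small sketch of handling that, assuming `loadLlmConfig` is importable from the package (the README snippet earlier refers to it):

```ts
import { loadLlmConfig } from "@easynet/agent-llm";

try {
  // null when the file does not exist or the YAML has no top-level `llm:` key
  const llmSection = loadLlmConfig("config/llm.yaml");
  if (llmSection == null) {
    // fall back to env-based defaults, or report a missing config
  }
} catch (e) {
  // "agent-llm: failed to read config file ..." or "agent-llm: failed to parse config file ..."
  if (e instanceof Error) console.error(e.message, e.cause);
  throw e;
}
```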
145
155
 
@@ -311,18 +321,34 @@ function getChatModelFactory(providerName) {
311
321
  // src/llmAdapter.ts
312
322
  import { ChatOpenAI } from "@langchain/openai";
313
323
  var DEFAULT_MODEL = "gpt-4o-mini";
324
+ function normalizeAgentLlMError(e, context) {
325
+ if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });
326
+ return new Error(`${context}: ${String(e)}`);
327
+ }
314
328
  function createChatModelFromLlmConfig(options) {
315
329
  const { llmSection, modelEnv, apiKeyEnv } = options;
316
- const { defaultId, configs } = parseLlmSection(llmSection ?? null);
330
+ let defaultId;
331
+ let configs;
332
+ try {
333
+ const parsed = parseLlmSection(llmSection ?? null);
334
+ defaultId = parsed.defaultId;
335
+ configs = parsed.configs;
336
+ } catch (e) {
337
+ throw normalizeAgentLlMError(e, "agent-llm: failed to parse llm section");
338
+ }
317
339
  const defaultConfig = configs.find((c) => c.id === defaultId) ?? configs[0];
318
340
  if (!defaultConfig) {
319
341
  const model2 = modelEnv ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;
320
342
  const apiKey2 = apiKeyEnv ?? process.env.OPENAI_API_KEY;
321
- return new ChatOpenAI({
322
- model: model2,
323
- temperature: 0,
324
- ...apiKey2 ? { apiKey: apiKey2 } : {}
325
- });
343
+ try {
344
+ return new ChatOpenAI({
345
+ model: model2,
346
+ temperature: 0,
347
+ ...apiKey2 ? { apiKey: apiKey2 } : {}
348
+ });
349
+ } catch (e) {
350
+ throw normalizeAgentLlMError(e, "agent-llm: failed to create default ChatOpenAI");
351
+ }
326
352
  }
327
353
  const provider = defaultConfig.provider ?? "openai";
328
354
  const chatModelFactory = getChatModelFactory(provider);
@@ -332,7 +358,11 @@ function createChatModelFromLlmConfig(options) {
332
358
  model: modelEnv ?? defaultConfig.model,
333
359
  temperature: typeof defaultConfig.temperature === "number" ? defaultConfig.temperature : 0
334
360
  };
335
- return chatModelFactory(config);
361
+ try {
362
+ return chatModelFactory(config);
363
+ } catch (e) {
364
+ throw normalizeAgentLlMError(e, `agent-llm: failed to create ChatModel for provider "${provider}"`);
365
+ }
336
366
  }
337
367
  const model = modelEnv ?? defaultConfig?.model ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;
338
368
  let apiKey = apiKeyEnv ?? defaultConfig?.apiKey ?? process.env.OPENAI_API_KEY;
@@ -350,24 +380,100 @@ function createChatModelFromLlmConfig(options) {
350
380
  ...apiKey ? { apiKey } : {},
351
381
  ...baseURL ? { configuration: { baseURL } } : {}
352
382
  };
353
- return new ChatOpenAI(constructorOptions);
383
+ try {
384
+ return new ChatOpenAI(constructorOptions);
385
+ } catch (e) {
386
+ throw normalizeAgentLlMError(e, "agent-llm: failed to create ChatOpenAI from config");
387
+ }
354
388
  }
355
389
 
356
390
  // src/npmProviderProtocol.ts
357
391
  import { execSync } from "child_process";
358
392
 
359
393
  // src/loadLLMExtensions.ts
394
+ import { readdirSync, readFileSync as readFileSync2, existsSync as existsSync2 } from "fs";
395
+ import { join } from "path";
360
396
  var loadedPackages = /* @__PURE__ */ new Set();
361
- var DEFAULT_EXTENSIONS = ["wallee-llm"];
397
+ function isLLMExtension(m) {
398
+ if (m == null || typeof m !== "object") return false;
399
+ const e = m;
400
+ if (typeof e.providerName !== "string") return false;
401
+ const hasRegister = typeof e.register === "function";
402
+ const hasFactories = typeof e.createClient === "function" && typeof e.createChatModel === "function";
403
+ return hasRegister || hasFactories;
404
+ }
405
+ function registerExtension(ext) {
406
+ if (typeof ext.createClient === "function" && typeof ext.createChatModel === "function") {
407
+ registerProvider(ext.providerName, ext.createClient);
408
+ registerChatModelProvider(ext.providerName, ext.createChatModel);
409
+ return;
410
+ }
411
+ if (typeof ext.register === "function") {
412
+ ext.register();
413
+ }
414
+ }
415
+ function getExtensionFromModule(m) {
416
+ if (isLLMExtension(m)) return m;
417
+ if (typeof m.getLLMExtension === "function") {
418
+ const ext = m.getLLMExtension();
419
+ return isLLMExtension(ext) ? ext : null;
420
+ }
421
+ if (isLLMExtension(m.default)) return m.default;
422
+ return null;
423
+ }
362
424
  function resolveLLMExtensionPackages(types) {
363
- const typeList = types == null ? [] : Array.isArray(types) ? types : [types];
364
- const packages = typeList.filter(
365
- (t) => typeof t === "string" && t.length > 0
425
+ if (types == null) return [];
426
+ const typeList = Array.isArray(types) ? types : [types];
427
+ return typeList.filter(
428
+ (t) => typeof t === "string" && String(t).trim().length > 0
366
429
  );
367
- return packages.length > 0 ? packages : DEFAULT_EXTENSIONS;
430
+ }
431
+ function readPackageProviderName(pkgPath) {
432
+ if (!existsSync2(pkgPath)) return null;
433
+ try {
434
+ const raw = readFileSync2(pkgPath, "utf-8");
435
+ const pkg = JSON.parse(raw);
436
+ const declared = pkg.agentLlmProvider === true || Array.isArray(pkg.keywords) && pkg.keywords.includes("agent-llm-provider");
437
+ return declared && typeof pkg.name === "string" ? pkg.name : null;
438
+ } catch {
439
+ return null;
440
+ }
441
+ }
442
+ function discoverLLMExtensions(cwd = process.cwd()) {
443
+ const dir = typeof cwd === "string" && cwd.trim().length > 0 ? cwd : process.cwd();
444
+ const nodeModules = join(dir, "node_modules");
445
+ if (!existsSync2(nodeModules)) return [];
446
+ const names = [];
447
+ const seen = /* @__PURE__ */ new Set();
448
+ try {
449
+ const entries = readdirSync(nodeModules, { withFileTypes: true });
450
+ for (const e of entries) {
451
+ if (e.name.startsWith(".") || e.name === "node") continue;
452
+ if (e.name.startsWith("@") && e.isDirectory()) {
453
+ const scopePath = join(nodeModules, e.name);
454
+ const scopeEntries = readdirSync(scopePath, { withFileTypes: true });
455
+ for (const se of scopeEntries) {
456
+ if (!se.isDirectory()) continue;
457
+ const name2 = readPackageProviderName(join(scopePath, se.name, "package.json"));
458
+ if (name2 && !seen.has(name2)) {
459
+ seen.add(name2);
460
+ names.push(name2);
461
+ }
462
+ }
463
+ continue;
464
+ }
465
+ const name = readPackageProviderName(join(nodeModules, e.name, "package.json"));
466
+ if (name && !seen.has(name)) {
467
+ seen.add(name);
468
+ names.push(name);
469
+ }
470
+ }
471
+ } catch {
472
+ }
473
+ return names;
368
474
  }
369
475
  async function loadLLMExtensions(extensionPackages) {
370
- const packages = extensionPackages ?? DEFAULT_EXTENSIONS;
476
+ const packages = Array.isArray(extensionPackages) ? extensionPackages.filter((p) => typeof p === "string" && String(p).trim().length > 0) : [];
371
477
  for (const pkg of packages) {
372
478
  if (loadedPackages.has(pkg)) continue;
373
479
  loadedPackages.add(pkg);
@@ -376,18 +482,33 @@ async function loadLLMExtensions(extensionPackages) {
376
482
  /* @vite-ignore */
377
483
  pkg
378
484
  );
485
+ const ext = getExtensionFromModule(m);
486
+ if (ext) {
487
+ registerExtension(ext);
488
+ continue;
489
+ }
379
490
  if (typeof m.registerLLMExtension === "function") {
380
491
  m.registerLLMExtension();
381
492
  }
382
- } catch {
493
+ } catch (e) {
494
+ const msg = e instanceof Error ? e.message : String(e);
495
+ if (typeof process !== "undefined" && process.emitWarning) {
496
+ process.emitWarning(`[agent-llm] Failed to load extension "${pkg}": ${msg}`, { code: "AGENT_LLM_EXTENSION_LOAD" });
497
+ }
383
498
  }
384
499
  }
385
500
  }
501
+ async function loadDiscoveredExtensions(cwd = process.cwd()) {
502
+ const dir = typeof cwd === "string" && cwd.trim().length > 0 ? cwd : process.cwd();
503
+ const names = discoverLLMExtensions(dir);
504
+ await loadLLMExtensions(names);
505
+ return names;
506
+ }
386
507
 
387
508
  // src/npmProviderProtocol.ts
388
509
  var NPM_PROTOCOL_PREFIX = "npm:";
389
510
  function parseNpmProviderSpec(spec) {
390
- if (!isNpmProviderSpec(spec)) return null;
511
+ if (typeof spec !== "string" || !spec.startsWith(NPM_PROTOCOL_PREFIX)) return null;
391
512
  const rest = spec.slice(NPM_PROTOCOL_PREFIX.length);
392
513
  const hashIdx = rest.indexOf("#");
393
514
  const beforeHash = hashIdx >= 0 ? rest.slice(0, hashIdx).trim() : rest.trim();
@@ -406,6 +527,9 @@ function isModuleNotFoundError(err) {
406
527
  return msg.includes("Cannot find module") || msg.includes("Cannot find package") || msg.includes("MODULE_NOT_FOUND") || msg.includes("ERR_MODULE_NOT_FOUND");
407
528
  }
408
529
  async function ensureNpmPackageInstalled(packageName, options = {}) {
530
+ if (typeof packageName !== "string" || packageName.trim().length === 0) {
531
+ throw new Error("agent-llm: ensureNpmPackageInstalled requires a non-empty package name");
532
+ }
409
533
  const cwd = options.cwd ?? process.cwd();
410
534
  const version = options.version;
411
535
  try {
@@ -418,13 +542,25 @@ async function ensureNpmPackageInstalled(packageName, options = {}) {
418
542
  if (!isModuleNotFoundError(err)) throw err;
419
543
  }
420
544
  const installSpec = version ? `${packageName}@${version}` : packageName;
421
- execSync(`npm install ${installSpec}`, {
422
- cwd,
423
- stdio: "inherit",
424
- encoding: "utf-8"
425
- });
545
+ try {
546
+ execSync(`npm install ${installSpec}`, {
547
+ cwd,
548
+ stdio: "inherit",
549
+ encoding: "utf-8"
550
+ });
551
+ } catch (e) {
552
+ const msg = e instanceof Error ? e.message : String(e);
553
+ throw new Error(`agent-llm: npm install failed for ${installSpec}: ${msg}`, { cause: e });
554
+ }
555
+ }
556
+ function normalizeAgentLlMError2(e, context) {
557
+ if (e instanceof Error) {
558
+ return new Error(`${context}: ${e.message}`, { cause: e });
559
+ }
560
+ return new Error(`${context}: ${String(e)}`);
426
561
  }
427
562
  async function resolveNpmProvider(spec, options = {}) {
563
+ if (typeof spec !== "string" || spec.trim().length === 0) return null;
428
564
  const parsed = parseNpmProviderSpec(spec);
429
565
  if (!parsed) return null;
430
566
  const { packageName, version, provider: fragmentProvider } = parsed;
@@ -437,10 +573,14 @@ async function resolveNpmProvider(spec, options = {}) {
437
573
  await load();
438
574
  } catch (err) {
439
575
  if (installNpmIfMissing && isModuleNotFoundError(err)) {
440
- await ensureNpmPackageInstalled(packageName, { version, cwd });
441
- await load();
576
+ try {
577
+ await ensureNpmPackageInstalled(packageName, { version, cwd });
578
+ await load();
579
+ } catch (installErr) {
580
+ throw normalizeAgentLlMError2(installErr, `agent-llm: failed to install or load npm provider "${packageName}"`);
581
+ }
442
582
  } else {
443
- throw err;
583
+ throw normalizeAgentLlMError2(err, `agent-llm: failed to load npm provider "${packageName}"`);
444
584
  }
445
585
  }
446
586
  if (fragmentProvider) return fragmentProvider;
@@ -462,8 +602,12 @@ async function resolveLlmSectionWithNpm(llmSection, options = {}) {
462
602
  if (llmSection == null) return llmSection;
463
603
  if (Array.isArray(llmSection)) {
464
604
  const out = [];
465
- for (const item of llmSection) {
466
- out.push(await resolveLlmSectionWithNpm(item, options));
605
+ for (let i = 0; i < llmSection.length; i++) {
606
+ try {
607
+ out.push(await resolveLlmSectionWithNpm(llmSection[i], options));
608
+ } catch (e) {
609
+ throw normalizeAgentLlMError2(e, `agent-llm: failed to resolve llm section at index ${i}`);
610
+ }
467
611
  }
468
612
  return out;
469
613
  }
@@ -471,60 +615,82 @@ async function resolveLlmSectionWithNpm(llmSection, options = {}) {
471
615
  const out = {};
472
616
  for (const [k, v] of Object.entries(llmSection)) {
473
617
  if (k === "provider" && isNpmProviderSpec(v)) {
474
- const resolved = await resolveNpmProvider(v, options);
475
- out[k] = resolved ?? v;
618
+ try {
619
+ const resolved = await resolveNpmProvider(v, options);
620
+ out[k] = resolved ?? v;
621
+ } catch (e) {
622
+ throw normalizeAgentLlMError2(e, `agent-llm: failed to resolve provider "${String(v)}"`);
623
+ }
476
624
  continue;
477
625
  }
478
- out[k] = await resolveLlmSectionWithNpm(v, options);
626
+ try {
627
+ out[k] = await resolveLlmSectionWithNpm(v, options);
628
+ } catch (e) {
629
+ throw normalizeAgentLlMError2(e, `agent-llm: failed to resolve llm section key "${k}"`);
630
+ }
479
631
  }
480
632
  return out;
481
633
  }
482
634
  return llmSection;
483
635
  }
484
636
  async function createChatModelFromLlmConfigWithNpm(options) {
485
- const { installNpmIfMissing, cwd, ...rest } = options;
486
- const resolvedSection = await resolveLlmSectionWithNpm(options.llmSection ?? null, {
487
- installNpmIfMissing,
488
- cwd
489
- });
490
- return createChatModelFromLlmConfig({
491
- ...rest,
492
- llmSection: resolvedSection
493
- });
637
+ try {
638
+ const { installNpmIfMissing, cwd, ...rest } = options;
639
+ const resolvedSection = await resolveLlmSectionWithNpm(options.llmSection ?? null, {
640
+ installNpmIfMissing,
641
+ cwd
642
+ });
643
+ return createChatModelFromLlmConfig({
644
+ ...rest,
645
+ llmSection: resolvedSection
646
+ });
647
+ } catch (e) {
648
+ throw normalizeAgentLlMError2(e, "agent-llm: createChatModelFromLlmConfigWithNpm failed");
649
+ }
494
650
  }
495
651
 
496
652
  // src/createAgentLlM.ts
497
- import { join } from "path";
498
- import { existsSync as existsSync2 } from "fs";
653
+ import { join as join2 } from "path";
654
+ import { existsSync as existsSync3 } from "fs";
499
655
  function resolveDefaultConfigPath() {
500
656
  const cwd = process.cwd();
501
- if (existsSync2(join(cwd, "llm.yaml"))) return join(cwd, "llm.yaml");
502
- if (existsSync2(join(cwd, "config", "llm.yaml"))) return join(cwd, "config", "llm.yaml");
503
- const parentConfig = join(cwd, "..", "config", "llm.yaml");
504
- if (existsSync2(parentConfig)) return parentConfig;
505
- return join(cwd, "config", "llm.yaml");
506
- }
507
- function createAgentLlM(options = {}) {
508
- const configPath = options.configPath ?? resolveDefaultConfigPath();
509
- const llmSection = loadLlmConfig(configPath);
510
- if (llmSection == null) {
511
- throw new Error(`No LLM config at ${configPath}. Add llm.yaml or config/llm.yaml, or pass configPath.`);
512
- }
513
- return createChatModelFromLlmConfig({ llmSection });
514
- }
515
- async function createAgentLlMAsync(options = {}) {
516
- const configPath = options.configPath ?? resolveDefaultConfigPath();
517
- const llmSection = loadLlmConfig(configPath);
518
- if (llmSection == null) {
519
- throw new Error(`No LLM config at ${configPath}. Add llm.yaml or config/llm.yaml, or pass configPath.`);
520
- }
521
- return createChatModelFromLlmConfigWithNpm({
522
- llmSection,
523
- installNpmIfMissing: options.installNpmIfMissing !== false,
524
- cwd: process.cwd()
525
- });
657
+ if (existsSync3(join2(cwd, "llm.yaml"))) return join2(cwd, "llm.yaml");
658
+ if (existsSync3(join2(cwd, "config", "llm.yaml"))) return join2(cwd, "config", "llm.yaml");
659
+ const parentConfig = join2(cwd, "..", "config", "llm.yaml");
660
+ if (existsSync3(parentConfig)) return parentConfig;
661
+ return join2(cwd, "config", "llm.yaml");
662
+ }
663
+ function normalizeCreateOptions(configPathOrOptions) {
664
+ if (configPathOrOptions == null) return {};
665
+ if (typeof configPathOrOptions === "string") return { configPath: configPathOrOptions };
666
+ return configPathOrOptions;
667
+ }
668
+ function normalizeAgentLlMError3(e, context) {
669
+ if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });
670
+ return new Error(`${context}: ${String(e)}`);
671
+ }
672
+ async function createAgentLlM(configPathOrOptions) {
673
+ try {
674
+ const options = normalizeCreateOptions(configPathOrOptions);
675
+ const configPath = options.configPath ?? resolveDefaultConfigPath();
676
+ const llmSection = loadLlmConfig(configPath);
677
+ if (llmSection == null) {
678
+ throw new Error(`No LLM config at ${configPath}. Add llm.yaml or config/llm.yaml, or pass configPath.`);
679
+ }
680
+ return await createChatModelFromLlmConfigWithNpm({
681
+ llmSection,
682
+ installNpmIfMissing: options.installNpmIfMissing !== false,
683
+ cwd: process.cwd()
684
+ });
685
+ } catch (e) {
686
+ if (e instanceof Error && e.message.includes("No LLM config")) throw e;
687
+ throw normalizeAgentLlMError3(e, "agent-llm: createAgentLlM failed");
688
+ }
526
689
  }
527
690
 
691
+ // src/types.ts
692
+ var AGENT_LLM_PROVIDER_FIELD = "agentLlmProvider";
693
+
528
694
  export {
529
695
  parseLlmSection,
530
696
  substituteEnv,
@@ -540,7 +706,9 @@ export {
540
706
  getChatModelFactory,
541
707
  createChatModelFromLlmConfig,
542
708
  resolveLLMExtensionPackages,
709
+ discoverLLMExtensions,
543
710
  loadLLMExtensions,
711
+ loadDiscoveredExtensions,
544
712
  NPM_PROTOCOL_PREFIX,
545
713
  parseNpmProviderSpec,
546
714
  isNpmProviderSpec,
@@ -549,6 +717,6 @@ export {
549
717
  resolveLlmSectionWithNpm,
550
718
  createChatModelFromLlmConfigWithNpm,
551
719
  createAgentLlM,
552
- createAgentLlMAsync
720
+ AGENT_LLM_PROVIDER_FIELD
553
721
  };
554
- //# sourceMappingURL=chunk-UZOGGJK7.js.map
722
+ //# sourceMappingURL=chunk-QKN7FTFV.js.map
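Besides the renamed chunk, this version adds `discoverLLMExtensions` and `loadDiscoveredExtensions` to the export list above. A sketch of how they fit together, based on the discovery code in this diff; the discovered package name is only an example:

```ts
import { discoverLLMExtensions, loadDiscoveredExtensions } from "@easynet/agent-llm";

// discoverLLMExtensions scans <cwd>/node_modules for packages whose package.json
// sets "agentLlmProvider": true or lists "agent-llm-provider" in keywords.
const names = discoverLLMExtensions(process.cwd()); // e.g. ["wallee-llm"]

// loadDiscoveredExtensions runs the same discovery, loads each package, and
// registers its providers; it resolves to the list of discovered package names.
const loaded = await loadDiscoveredExtensions();
```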
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/config.ts","../src/loadLlmConfig.ts","../src/providers/openai.ts","../src/providers/index.ts","../src/factory.ts","../src/chatModelRegistry.ts","../src/llmAdapter.ts","../src/npmProviderProtocol.ts","../src/loadLLMExtensions.ts","../src/createAgentLlM.ts","../src/types.ts"],"sourcesContent":["/**\n * Parse agent.yaml llm section into normalized LLMConfig[] and default id.\n * Supports: flat (each model keyed by name), instances[], or single object.\n */\n\nimport type { LLMConfig } from \"./types.js\";\n\nconst DEFAULT_LLM_ID = \"default\";\n\nconst RESERVED_KEYS = new Set([\n \"default\",\n \"instances\",\n \"catalog\",\n \"provider\",\n \"model\",\n \"temperature\",\n \"apiKey\",\n \"baseURL\",\n \"base_url\",\n \"type\",\n \"id\",\n]);\n\n/**\n * Parse llm section: flat (each model keyed by name), default+instances, or single object.\n */\nexport function parseLlmSection(section: unknown): { defaultId: string; configs: LLMConfig[] } {\n if (section == null || typeof section !== \"object\") {\n return { defaultId: DEFAULT_LLM_ID, configs: [] };\n }\n\n if (Array.isArray(section)) {\n const configs = section\n .filter((i): i is Record<string, unknown> => i != null && typeof i === \"object\")\n .map((item, i) => normalizeLlmConfig({ ...item, id: item.id ?? item.name ?? String(i) }))\n .filter((c): c is LLMConfig => c != null);\n const defaultId = configs.length > 0 ? configs[0]!.id : DEFAULT_LLM_ID;\n return { defaultId, configs };\n }\n\n const s = section as Record<string, unknown>;\n\n const flatEntries = Object.entries(s).filter(\n ([k, v]) => !RESERVED_KEYS.has(k) && v != null && typeof v === \"object\" && !Array.isArray(v)\n );\n if (flatEntries.length > 0) {\n const configs: LLMConfig[] = [];\n for (const [id, entry] of flatEntries) {\n const c = entryToLlmConfig(id, entry as Record<string, unknown>);\n if (c) configs.push(c);\n }\n const defaultId =\n typeof s.default === \"string\" && s.default && flatEntries.some(([k]) => k === s.default)\n ? s.default\n : configs.length > 0\n ? configs[0]!.id\n : DEFAULT_LLM_ID;\n return { defaultId, configs };\n }\n\n if (Array.isArray(s.instances)) {\n const configs = (s.instances as unknown[])\n .filter((i): i is Record<string, unknown> => i != null && typeof i === \"object\")\n .map((i) => normalizeLlmConfig(i))\n .filter((c): c is LLMConfig => c != null);\n const defaultId =\n typeof s.default === \"string\" && s.default\n ? s.default\n : configs.length > 0\n ? configs[0]!.id\n : DEFAULT_LLM_ID;\n return { defaultId, configs };\n }\n\n if (typeof s.provider === \"string\" || typeof s.model === \"string\" || typeof (s as { name?: string }).name === \"string\") {\n const one = singleObjectToLlmConfig(s);\n return { defaultId: one.id, configs: [one] };\n }\n\n return { defaultId: DEFAULT_LLM_ID, configs: [] };\n}\n\nconst EXTENSION_OPTION_KEYS = [\"featureKey\", \"tenant\", \"authToken\", \"verifySSL\", \"bypassAuth\", \"host\", \"resolveHost\", \"timeoutMs\", \"options\"];\n\nfunction entryToLlmConfig(id: string, entry: Record<string, unknown>): LLMConfig | null {\n const opts = entry.options as Record<string, unknown> | undefined;\n const baseURL =\n typeof entry.base_url === \"string\"\n ? entry.base_url\n : typeof entry.baseURL === \"string\"\n ? entry.baseURL\n : undefined;\n const model = typeof entry.name === \"string\" ? entry.name : typeof entry.model === \"string\" ? entry.model : undefined;\n const provider = typeof entry.provider === \"string\" && entry.provider ? 
entry.provider : \"openai\";\n const config: LLMConfig = {\n id,\n type: \"chat\",\n provider,\n model,\n temperature: typeof opts?.temperature === \"number\" ? opts.temperature : typeof entry.temperature === \"number\" ? entry.temperature : undefined,\n apiKey: typeof opts?.apiKey === \"string\" ? opts.apiKey : typeof entry.apiKey === \"string\" ? entry.apiKey : undefined,\n baseURL,\n };\n if (typeof entry.type === \"string\" && entry.type === \"image\") config.type = \"image\";\n if (opts && typeof opts === \"object\") (config as Record<string, unknown>).options = opts;\n for (const k of EXTENSION_OPTION_KEYS) {\n if (entry[k] !== undefined) (config as Record<string, unknown>)[k] = entry[k];\n else if (opts && opts[k] !== undefined) (config as Record<string, unknown>)[k] = opts[k];\n }\n return config;\n}\n\nfunction singleObjectToLlmConfig(s: Record<string, unknown>): LLMConfig {\n const one: LLMConfig = {\n id: DEFAULT_LLM_ID,\n type: \"chat\",\n provider: typeof s.provider === \"string\" ? s.provider : \"openai\",\n model: typeof s.model === \"string\" ? s.model : (typeof (s as { name?: string }).name === \"string\" ? (s as { name: string }).name : undefined),\n temperature: typeof s.temperature === \"number\" ? s.temperature : undefined,\n apiKey: typeof s.apiKey === \"string\" ? s.apiKey : undefined,\n baseURL:\n typeof s.baseURL === \"string\" ? s.baseURL : typeof s.base_url === \"string\" ? s.base_url : undefined,\n };\n Object.keys(s).forEach((k) => {\n if (![\"id\", \"type\", \"provider\", \"model\", \"name\", \"temperature\", \"apiKey\", \"baseURL\", \"base_url\", \"default\", \"instances\"].includes(k)) {\n (one as Record<string, unknown>)[k] = s[k];\n }\n });\n return one;\n}\n\nfunction normalizeLlmConfig(o: Record<string, unknown>): LLMConfig | null {\n const id = typeof o.id === \"string\" && o.id ? o.id : DEFAULT_LLM_ID;\n const type = o.type === \"image\" ? \"image\" : \"chat\";\n const provider = typeof o.provider === \"string\" && o.provider ? o.provider : \"openai\";\n const opts = o.options as Record<string, unknown> | undefined;\n const config: LLMConfig = {\n id,\n type,\n provider,\n model: typeof o.model === \"string\" ? o.model : (typeof o.name === \"string\" ? o.name : undefined),\n temperature:\n typeof o.temperature === \"number\"\n ? o.temperature\n : typeof opts?.temperature === \"number\"\n ? opts.temperature\n : undefined,\n apiKey:\n typeof o.apiKey === \"string\"\n ? o.apiKey\n : typeof opts?.apiKey === \"string\"\n ? opts.apiKey\n : undefined,\n baseURL: typeof o.baseURL === \"string\" ? o.baseURL : (typeof o.base_url === \"string\" ? o.base_url : undefined),\n };\n Object.keys(o).forEach((k) => {\n if (![\"id\", \"type\", \"provider\", \"model\", \"name\", \"temperature\", \"apiKey\", \"baseURL\", \"base_url\"].includes(k)) {\n (config as Record<string, unknown>)[k] = o[k];\n }\n });\n return config;\n}\n","/**\n * Load and parse LLM config from YAML (e.g. config/llm.yaml).\n * Supports ${VAR} substitution from process.env.\n */\n\nimport { readFileSync, existsSync } from \"node:fs\";\nimport { parse as parseYaml } from \"yaml\";\n\nexport interface LoadLlmConfigOptions {\n /** Replace ${VAR} with process.env.VAR. Default true. */\n substituteEnv?: boolean;\n}\n\n/**\n * Recursively replace ${VAR} in strings with process.env.VAR.\n */\nexport function substituteEnv(obj: unknown): unknown {\n if (obj === null || obj === undefined) return obj;\n if (typeof obj === \"string\") {\n const m = obj.match(/^\\$\\{(\\w+)\\}$/);\n return m ? 
(process.env[m[1]] ?? obj) : obj;\n }\n if (Array.isArray(obj)) return obj.map(substituteEnv);\n if (typeof obj === \"object\") {\n const out: Record<string, unknown> = {};\n for (const [k, v] of Object.entries(obj)) out[k] = substituteEnv(v);\n return out;\n }\n return obj;\n}\n\n/**\n * Parse YAML string and return the llm section (top-level key \"llm\").\n * Returns undefined if content has no llm key.\n */\nexport function parseLlmYaml(\n content: string,\n options: LoadLlmConfigOptions = {}\n): unknown {\n const { substituteEnv: doSub = true } = options;\n const parsed = parseYaml(content) as { llm?: unknown };\n const llm = parsed?.llm;\n if (llm == null) return undefined;\n return doSub ? substituteEnv(llm) : llm;\n}\n\n/**\n * Load LLM config from a YAML file (e.g. config/llm.yaml).\n * Returns the llm section for use with createChatModelFromLlmConfig or parseLlmSection.\n * Returns null if file does not exist or has no llm key.\n * Throws with clear message if file exists but read or parse fails.\n */\nexport function loadLlmConfig(\n filePath: string,\n options: LoadLlmConfigOptions = {}\n): unknown | null {\n if (typeof filePath !== \"string\" || filePath.trim().length === 0) {\n throw new Error(\"agent-llm: loadLlmConfig requires a non-empty file path\");\n }\n if (!existsSync(filePath)) return null;\n let raw: string;\n try {\n raw = readFileSync(filePath, \"utf8\");\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n throw new Error(`agent-llm: failed to read config file ${filePath}: ${msg}`, { cause: e });\n }\n try {\n const llm = parseLlmYaml(raw, options);\n return llm ?? null;\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n throw new Error(`agent-llm: failed to parse config file ${filePath}: ${msg}`, { cause: e });\n }\n}\n","/**\n * OpenAI-compatible format: chat (/v1/chat/completions) and image.\n * Supports baseURL for Azure, local proxy, and other compatible endpoints.\n */\n\nimport OpenAI from \"openai\";\nimport type {\n LLMConfig,\n ChatMessage,\n ChatResult,\n ImageResult,\n ILLMClient,\n ChatWithToolsMessage,\n ChatWithToolsResult,\n ToolDefinition,\n} from \"../types.js\";\n\nfunction getApiKey(config: LLMConfig): string {\n const key = config.apiKey ?? process.env.OPENAI_API_KEY ?? \"\";\n if (!key) throw new Error(\"OpenAI-compatible apiKey required (config.apiKey or OPENAI_API_KEY)\");\n return key;\n}\n\nfunction createOpenAIClientOptions(config: LLMConfig): { apiKey: string; baseURL?: string } {\n const opts: { apiKey: string; baseURL?: string } = { apiKey: getApiKey(config) };\n if (typeof config.baseURL === \"string\" && config.baseURL) opts.baseURL = config.baseURL;\n return opts;\n}\n\nfunction serializeMessage(\n m: ChatWithToolsMessage\n): OpenAI.Chat.Completions.ChatCompletionMessageParam {\n if (m.role === \"tool\")\n return { role: \"tool\", content: m.content, tool_call_id: m.tool_call_id };\n if (m.role === \"assistant\" && \"tool_calls\" in m && m.tool_calls?.length) {\n return {\n role: \"assistant\",\n content: m.content ?? null,\n tool_calls: m.tool_calls.map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: { name: tc.function.name, arguments: tc.function.arguments },\n })),\n };\n }\n return { role: m.role, content: (m as ChatMessage).content };\n}\n\nexport function createOpenAIChatClient(config: LLMConfig): ILLMClient {\n const client = new OpenAI(createOpenAIClientOptions(config));\n const model = config.model ?? process.env.OPENAI_MODEL ?? 
\"gpt-4o-mini\";\n const temperature = config.temperature ?? 0;\n\n return {\n id: config.id,\n type: \"chat\",\n async chat(messages: ChatMessage[]): Promise<ChatResult> {\n const resp = await client.chat.completions.create({\n model,\n temperature,\n messages: messages.map((m) => ({ role: m.role, content: m.content })),\n });\n const content = resp.choices[0]?.message?.content ?? \"\";\n const usage = resp.usage\n ? { promptTokens: resp.usage.prompt_tokens, completionTokens: resp.usage.completion_tokens }\n : undefined;\n return { content, usage };\n },\n async chatWithTools(\n messages: ChatWithToolsMessage[],\n tools: ToolDefinition[],\n _options?: { timeoutMs?: number }\n ): Promise<ChatWithToolsResult> {\n const resp = await client.chat.completions.create({\n model,\n temperature,\n messages: messages.map(serializeMessage),\n tools: tools.map((t) => ({\n type: \"function\" as const,\n function: {\n name: t.function.name,\n description: t.function.description,\n parameters: (t.function.parameters ?? undefined) as Record<string, unknown> | undefined,\n },\n })),\n });\n const msg = resp.choices[0]?.message;\n const usage = resp.usage\n ? { promptTokens: resp.usage.prompt_tokens, completionTokens: resp.usage.completion_tokens }\n : undefined;\n return {\n message: {\n role: \"assistant\",\n content: msg?.content ?? null,\n tool_calls: msg?.tool_calls?.map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: {\n name: tc.function?.name ?? \"\",\n arguments: tc.function?.arguments ?? \"\",\n },\n })),\n },\n usage,\n };\n },\n };\n}\n\nexport function createOpenAIImageClient(config: LLMConfig): ILLMClient {\n const client = new OpenAI(createOpenAIClientOptions(config));\n const model = (config.model as string) ?? \"dall-e-3\";\n\n return {\n id: config.id,\n type: \"image\",\n async chat(): Promise<ChatResult> {\n throw new Error(\"OpenAI image model does not support chat; use generateImage()\");\n },\n async generateImage(options: { prompt: string; size?: string; n?: number }): Promise<ImageResult> {\n const resp = await client.images.generate({\n model,\n prompt: options.prompt,\n size: (options.size as \"1024x1024\" | \"1792x1024\" | \"1024x1792\") ?? \"1024x1024\",\n n: options.n ?? 1,\n response_format: \"url\",\n });\n const url = resp.data?.[0]?.url ?? undefined;\n return { url };\n },\n };\n}\n\nexport function createOpenAIClient(config: LLMConfig): ILLMClient {\n if (config.type === \"image\") return createOpenAIImageClient(config);\n return createOpenAIChatClient(config);\n}\n","/**\n * Supports OpenAI-compatible and extension providers.\n */\n\nimport type { LLMConfig, ILLMClient } from \"../types.js\";\nimport { createOpenAIClient } from \"./openai.js\";\n\nconst OPENAI_COMPATIBLE = \"openai-compatible\";\n\nfunction createOpenAICompat(config: LLMConfig): ILLMClient {\n return createOpenAIClient(config);\n}\n\nconst PROVIDERS: Record<string, (config: LLMConfig) => ILLMClient> = {\n openai: createOpenAICompat,\n [OPENAI_COMPATIBLE]: createOpenAICompat,\n};\n\nexport function createClient(config: LLMConfig): ILLMClient {\n const p = (config.provider ?? \"\").toLowerCase();\n const fn = PROVIDERS[p];\n if (!fn) {\n const supported = [...new Set([...Object.keys(PROVIDERS), \"extension providers\"])].sort().join(\", \");\n throw new Error(\n `Unsupported LLM provider: ${config.provider}. 
Supported: ${supported}.`\n );\n }\n return fn(config);\n}\n\nexport function registerProvider(name: string, factory: (config: LLMConfig) => ILLMClient): void {\n PROVIDERS[name.toLowerCase()] = factory;\n}\n","/**\n * Create LLM registry from agent.yaml llm section.\n */\n\nimport { parseLlmSection } from \"./config.js\";\nimport { createClient } from \"./providers/index.js\";\nimport type { AgentConfigLlmSection, ILLMClient, ILLMRegistry } from \"./types.js\";\n\nexport interface CreateLLMRegistryOptions {\n /** Parsed llm section (e.g. from loadAgentConfig's config.llm) */\n llmSection: AgentConfigLlmSection | null | undefined;\n}\n\n/**\n * Create LLM registry from agent config llm section; supports multiple providers/models, each LLM has id and type.\n */\nexport function createLLMRegistry(options: CreateLLMRegistryOptions): ILLMRegistry {\n const { defaultId, configs } = parseLlmSection(options.llmSection);\n const map = new Map<string, ILLMClient>();\n\n for (const config of configs) {\n try {\n const client = createClient(config);\n map.set(config.id, client);\n } catch (err) {\n console.warn(`[agent-llm] Skip LLM \"${config.id}\": ${err instanceof Error ? err.message : String(err)}`);\n }\n }\n\n return {\n get(id: string): ILLMClient | undefined {\n return map.get(id);\n },\n defaultId(): string | undefined {\n if (map.has(defaultId)) return defaultId;\n return map.size > 0 ? [...map.keys()][0] : undefined;\n },\n ids(): string[] {\n return [...map.keys()];\n },\n };\n}\n","/**\n * Registry for LangChain ChatModel by provider name.\n * Extensions register via registerChatModelProvider; llmAdapter uses getChatModelFactory.\n */\n\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\nimport type { LLMConfig } from \"./types.js\";\n\nexport type ChatModelFactory = (config: LLMConfig) => BaseChatModel;\n\nconst CHAT_MODEL_FACTORIES = new Map<string, ChatModelFactory>();\n\n/**\n * Register a ChatModel factory for a provider name.\n * Called by extensions (e.g. 
wallee-llm) on load.\n */\nexport function registerChatModelProvider(providerName: string, factory: ChatModelFactory): void {\n CHAT_MODEL_FACTORIES.set(providerName.toLowerCase(), factory);\n}\n\n/**\n * Get the ChatModel factory for a provider name, if registered.\n */\nexport function getChatModelFactory(providerName: string): ChatModelFactory | undefined {\n return CHAT_MODEL_FACTORIES.get(providerName.toLowerCase());\n}\n","/**\n * Build LangChain ChatModel from agent.yaml llm section.\n * Supports single object, default + instances, and flat keyed configs.\n * When provider is registered by an extension, uses that extension's ChatModel;\n * otherwise uses ChatOpenAI.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\nimport { parseLlmSection } from \"./config.js\";\nimport { getChatModelFactory } from \"./chatModelRegistry.js\";\n\nconst DEFAULT_MODEL = \"gpt-4o-mini\";\n\nexport interface CreateChatModelFromLlmConfigOptions {\n /** agent.yaml llm section (raw or parsed); compatible with AgentConfigLlmSection / AgentConfigLlm */\n llmSection?: unknown;\n /** Override model from env */\n modelEnv?: string;\n /** Override API key from env */\n apiKeyEnv?: string;\n}\n\nfunction normalizeAgentLlMError(e: unknown, context: string): Error {\n if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });\n return new Error(`${context}: ${String(e)}`);\n}\n\n/**\n * Create a LangChain ChatModel from agent config llm section.\n * Uses extension-registered ChatModel when available; otherwise ChatOpenAI.\n */\nexport function createChatModelFromLlmConfig(\n options: CreateChatModelFromLlmConfigOptions\n): BaseChatModel {\n const { llmSection, modelEnv, apiKeyEnv } = options;\n let defaultId: string;\n let configs: ReturnType<typeof parseLlmSection>[\"configs\"];\n try {\n const parsed = parseLlmSection(llmSection ?? null);\n defaultId = parsed.defaultId;\n configs = parsed.configs;\n } catch (e) {\n throw normalizeAgentLlMError(e, \"agent-llm: failed to parse llm section\");\n }\n const defaultConfig = configs.find((c) => c.id === defaultId) ?? configs[0];\n\n if (!defaultConfig) {\n const model =\n modelEnv ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;\n const apiKey = apiKeyEnv ?? process.env.OPENAI_API_KEY;\n try {\n return new ChatOpenAI({\n model,\n temperature: 0,\n ...(apiKey ? { apiKey } : {}),\n });\n } catch (e) {\n throw normalizeAgentLlMError(e, \"agent-llm: failed to create default ChatOpenAI\");\n }\n }\n\n const provider = (defaultConfig as { provider?: string }).provider ?? \"openai\";\n const chatModelFactory = getChatModelFactory(provider);\n if (chatModelFactory) {\n const config = {\n ...defaultConfig,\n model: modelEnv ?? defaultConfig.model,\n temperature:\n typeof defaultConfig.temperature === \"number\"\n ? defaultConfig.temperature\n : 0,\n };\n try {\n return chatModelFactory(config);\n } catch (e) {\n throw normalizeAgentLlMError(e, `agent-llm: failed to create ChatModel for provider \"${provider}\"`);\n }\n }\n\n const model =\n modelEnv ??\n defaultConfig?.model ??\n process.env.OPENAI_MODEL ??\n DEFAULT_MODEL;\n\n let apiKey =\n apiKeyEnv ?? defaultConfig?.apiKey ?? process.env.OPENAI_API_KEY;\n let baseURL = defaultConfig?.baseURL;\n // OpenAI client appends path (e.g. 
/chat/completions) to baseURL; Ollama and OpenAI-compatible APIs expect /v1/chat/completions.\n if (baseURL && !baseURL.replace(/\\/$/, \"\").endsWith(\"/v1\")) {\n baseURL = baseURL.replace(/\\/$/, \"\") + \"/v1\";\n }\n // OpenAI client throws if apiKey is undefined; Ollama and many compatible endpoints accept a dummy.\n if (baseURL && apiKey === undefined) {\n apiKey = \"ollama\";\n }\n\n const temperature =\n typeof defaultConfig?.temperature === \"number\" ? defaultConfig.temperature : 0;\n\n const constructorOptions: ConstructorParameters<typeof ChatOpenAI>[0] = {\n model,\n temperature,\n ...(apiKey ? { apiKey } : {}),\n ...(baseURL ? { configuration: { baseURL } } : {}),\n };\n\n try {\n return new ChatOpenAI(constructorOptions);\n } catch (e) {\n throw normalizeAgentLlMError(e, \"agent-llm: failed to create ChatOpenAI from config\");\n }\n}\n","/**\n * npm: protocol in provider — specify an npm package name (and optional version) in config's model provider.\n * Format: npm:<package-name> or npm:<package-name>@<version> or npm:<package-name>#<provider-name> or npm:<package-name>@<version>#<provider-name>\n * When installNpmIfMissing is true, the framework will run npm install <package>[@version] if the package is not found.\n */\n\nimport { execSync } from \"node:child_process\";\nimport { loadLLMExtensions } from \"./loadLLMExtensions.js\";\nimport { createChatModelFromLlmConfig } from \"./llmAdapter.js\";\nimport type { CreateChatModelFromLlmConfigOptions } from \"./llmAdapter.js\";\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\n\nexport const NPM_PROTOCOL_PREFIX = \"npm:\";\n\n/** Parse npm: spec into package name, optional version, and optional #provider fragment. */\nexport function parseNpmProviderSpec(spec: string): { packageName: string; version?: string; provider?: string } | null {\n if (typeof spec !== \"string\" || !spec.startsWith(NPM_PROTOCOL_PREFIX)) return null;\n const rest = spec.slice(NPM_PROTOCOL_PREFIX.length);\n const hashIdx = rest.indexOf(\"#\");\n const beforeHash = hashIdx >= 0 ? rest.slice(0, hashIdx).trim() : rest.trim();\n const fragmentProvider = hashIdx >= 0 ? rest.slice(hashIdx + 1).trim() : null;\n // Extract version: last @ not part of scoped package (e.g. pkg@0.1.0 or @scope/pkg@1.0.0)\n const versionMatch = beforeHash.match(/@([^/]+)$/);\n const packageName = versionMatch ? beforeHash.slice(0, -versionMatch[0].length).trim() : beforeHash;\n const version = versionMatch ? versionMatch[1] : undefined;\n if (!packageName) return null;\n return { packageName, version, provider: fragmentProvider ?? undefined };\n}\n\nexport interface EnsureNpmPackageInstalledOptions {\n /** Version to install (e.g. 0.1.0, latest). If set, runs npm install <package>@<version>. */\n version?: string;\n /** Working directory for npm install. Default: process.cwd() */\n cwd?: string;\n}\n\n/**\n * Check if a provider spec uses the npm: protocol.\n */\nexport function isNpmProviderSpec(spec: unknown): spec is string {\n return typeof spec === \"string\" && spec.startsWith(NPM_PROTOCOL_PREFIX);\n}\n\nfunction isModuleNotFoundError(err: unknown): boolean {\n const msg = err instanceof Error ? 
err.message : String(err);\n return (\n msg.includes(\"Cannot find module\") ||\n msg.includes(\"Cannot find package\") ||\n msg.includes(\"MODULE_NOT_FOUND\") ||\n msg.includes(\"ERR_MODULE_NOT_FOUND\")\n );\n}\n\n/**\n * Ensure an npm package is installed: if it cannot be resolved, run npm install <packageName>[@version] from cwd.\n * Call this before loading a provider package when the config uses npm:<package-name>[@version].\n */\nexport async function ensureNpmPackageInstalled(\n packageName: string,\n options: EnsureNpmPackageInstalledOptions = {}\n): Promise<void> {\n if (typeof packageName !== \"string\" || packageName.trim().length === 0) {\n throw new Error(\"agent-llm: ensureNpmPackageInstalled requires a non-empty package name\");\n }\n const cwd = options.cwd ?? process.cwd();\n const version = options.version;\n try {\n await import(/* @vite-ignore */ packageName);\n return;\n } catch (err) {\n if (!isModuleNotFoundError(err)) throw err;\n }\n const installSpec = version ? `${packageName}@${version}` : packageName;\n try {\n execSync(`npm install ${installSpec}`, {\n cwd,\n stdio: \"inherit\",\n encoding: \"utf-8\",\n });\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n throw new Error(`agent-llm: npm install failed for ${installSpec}: ${msg}`, { cause: e });\n }\n}\n\nexport interface ResolveNpmProviderOptions {\n /** If true (default), run npm install <package> when the package is not found. */\n installNpmIfMissing?: boolean;\n /** Working directory for npm install. Default: process.cwd() */\n cwd?: string;\n}\n\n/**\n * Resolve an npm provider spec to the concrete provider name.\n * - npm:wallee-llm → load wallee-llm, use its default provider (e.g. cis)\n * - npm:wallee-llm@0.1.0 → load wallee-llm@0.1.0, use its default provider\n * - npm:wallee-llm#cis → load wallee-llm, use provider \"cis\"\n * - npm:wallee-llm@0.1.0#cis → load wallee-llm@0.1.0, use provider \"cis\"\n * When installNpmIfMissing is true, installs the package (with optional version) if not found.\n * Returns the provider name to use, or null if spec is not npm: protocol.\n */\nfunction normalizeAgentLlMError(e: unknown, context: string): Error {\n if (e instanceof Error) {\n return new Error(`${context}: ${e.message}`, { cause: e });\n }\n return new Error(`${context}: ${String(e)}`);\n}\n\nexport async function resolveNpmProvider(\n spec: string,\n options: ResolveNpmProviderOptions = {}\n): Promise<string | null> {\n if (typeof spec !== \"string\" || spec.trim().length === 0) return null;\n const parsed = parseNpmProviderSpec(spec);\n if (!parsed) return null;\n const { packageName, version, provider: fragmentProvider } = parsed;\n\n const installNpmIfMissing = options.installNpmIfMissing !== false;\n const cwd = options.cwd ?? 
process.cwd();\n\n const load = async (): Promise<void> => {\n await loadLLMExtensions([packageName]);\n };\n\n try {\n await load();\n } catch (err) {\n if (installNpmIfMissing && isModuleNotFoundError(err)) {\n try {\n await ensureNpmPackageInstalled(packageName, { version, cwd });\n await load();\n } catch (installErr) {\n throw normalizeAgentLlMError(installErr, `agent-llm: failed to install or load npm provider \"${packageName}\"`);\n }\n } else {\n throw normalizeAgentLlMError(err, `agent-llm: failed to load npm provider \"${packageName}\"`);\n }\n }\n\n if (fragmentProvider) return fragmentProvider;\n\n try {\n const m = await import(/* @vite-ignore */ packageName);\n if (\n typeof (m as { getDefaultProviderName?: () => string }).getDefaultProviderName === \"function\"\n ) {\n return (m as { getDefaultProviderName: () => string }).getDefaultProviderName();\n }\n } catch {\n // ignore\n }\n throw new Error(\n `Provider spec ${spec} has no #provider fragment and the package does not export getDefaultProviderName(). Use e.g. npm:${packageName}#<provider-name>.`\n );\n}\n\nexport interface ResolveLlmSectionWithNpmOptions extends ResolveNpmProviderOptions {}\n\n/**\n * Recursively resolve all provider values that use the npm: protocol in a clone of the llm section.\n * When installNpmIfMissing is true, installs any npm: package that is not found.\n */\nexport async function resolveLlmSectionWithNpm(\n llmSection: unknown,\n options: ResolveLlmSectionWithNpmOptions = {}\n): Promise<unknown> {\n if (llmSection == null) return llmSection;\n if (Array.isArray(llmSection)) {\n const out: unknown[] = [];\n for (let i = 0; i < llmSection.length; i++) {\n try {\n out.push(await resolveLlmSectionWithNpm(llmSection[i], options));\n } catch (e) {\n throw normalizeAgentLlMError(e, `agent-llm: failed to resolve llm section at index ${i}`);\n }\n }\n return out;\n }\n if (typeof llmSection === \"object\") {\n const out: Record<string, unknown> = {};\n for (const [k, v] of Object.entries(llmSection)) {\n if (k === \"provider\" && isNpmProviderSpec(v)) {\n try {\n const resolved = await resolveNpmProvider(v as string, options);\n out[k] = resolved ?? v;\n } catch (e) {\n throw normalizeAgentLlMError(e, `agent-llm: failed to resolve provider \"${String(v)}\"`);\n }\n continue;\n }\n try {\n out[k] = await resolveLlmSectionWithNpm(v, options);\n } catch (e) {\n throw normalizeAgentLlMError(e, `agent-llm: failed to resolve llm section key \"${k}\"`);\n }\n }\n return out;\n }\n return llmSection;\n}\n\nexport interface CreateChatModelFromLlmConfigWithNpmOptions\n extends CreateChatModelFromLlmConfigOptions,\n ResolveNpmProviderOptions {}\n\n/**\n * Create a LangChain ChatModel from llm section, resolving any provider values that use the npm: protocol.\n * Use when config has provider: \"npm:wallee-llm\" or provider: \"npm:wallee-llm#cis\".\n * When installNpmIfMissing is true (default), the framework will run npm install <package> if the package is not found.\n */\nexport async function createChatModelFromLlmConfigWithNpm(\n options: CreateChatModelFromLlmConfigWithNpmOptions\n): Promise<BaseChatModel> {\n try {\n const { installNpmIfMissing, cwd, ...rest } = options;\n const resolvedSection = await resolveLlmSectionWithNpm(options.llmSection ?? 
null, {\n installNpmIfMissing,\n cwd,\n });\n return createChatModelFromLlmConfig({\n ...rest,\n llmSection: resolvedSection,\n });\n } catch (e) {\n throw normalizeAgentLlMError(e, \"agent-llm: createChatModelFromLlmConfigWithNpm failed\");\n }\n}\n","/**\n * Load LLM extensions by npm package name or by dynamic discovery.\n * Extensions implement ILLMExtension (getLLMExtension()). Framework does registration so extension need not call register.\n */\n\nimport { readdirSync, readFileSync, existsSync } from \"node:fs\";\nimport { join } from \"node:path\";\nimport type { ILLMExtension } from \"./types.js\";\nimport { AGENT_LLM_PROVIDER_FIELD } from \"./types.js\";\nimport { registerProvider } from \"./providers/index.js\";\nimport { registerChatModelProvider, type ChatModelFactory } from \"./chatModelRegistry.js\";\n\nconst loadedPackages = new Set<string>();\n\nfunction isLLMExtension(m: unknown): m is ILLMExtension {\n if (m == null || typeof m !== \"object\") return false;\n const e = m as ILLMExtension;\n if (typeof e.providerName !== \"string\") return false;\n const hasRegister = typeof e.register === \"function\";\n const hasFactories = typeof e.createClient === \"function\" && typeof e.createChatModel === \"function\";\n return hasRegister || hasFactories;\n}\n\nfunction registerExtension(ext: ILLMExtension): void {\n if (typeof ext.createClient === \"function\" && typeof ext.createChatModel === \"function\") {\n registerProvider(ext.providerName, ext.createClient);\n registerChatModelProvider(ext.providerName, ext.createChatModel as ChatModelFactory);\n return;\n }\n if (typeof ext.register === \"function\") {\n ext.register();\n }\n}\n\nfunction getExtensionFromModule(m: Record<string, unknown>): ILLMExtension | null {\n if (isLLMExtension(m)) return m;\n if (typeof m.getLLMExtension === \"function\") {\n const ext = m.getLLMExtension();\n return isLLMExtension(ext) ? ext : null;\n }\n if (isLLMExtension(m.default)) return m.default;\n return null;\n}\n\n/**\n * Resolve llm.type to a list of npm package names to load.\n */\nexport function resolveLLMExtensionPackages(types?: string | string[]): string[] {\n if (types == null) return [];\n const typeList = Array.isArray(types) ? types : [types];\n return typeList.filter(\n (t): t is string => typeof t === \"string\" && String(t).trim().length > 0\n );\n}\n\n/**\n * Scan node_modules (optionally under cwd) for packages that declare agent-llm provider.\n * A package is included if package.json has \"agentLlmProvider\": true or keywords includes \"agent-llm-provider\".\n * @param cwd Directory containing node_modules; default process.cwd()\n * @returns List of package names that can be passed to loadLLMExtensions()\n */\nfunction readPackageProviderName(pkgPath: string): string | null {\n if (!existsSync(pkgPath)) return null;\n try {\n const raw = readFileSync(pkgPath, \"utf-8\");\n const pkg = JSON.parse(raw) as { agentLlmProvider?: boolean; keywords?: string[]; name?: string };\n const declared =\n pkg.agentLlmProvider === true ||\n (Array.isArray(pkg.keywords) && pkg.keywords.includes(\"agent-llm-provider\"));\n return declared && typeof pkg.name === \"string\" ? pkg.name : null;\n } catch {\n return null;\n }\n}\n\nexport function discoverLLMExtensions(cwd: string = process.cwd()): string[] {\n const dir = typeof cwd === \"string\" && cwd.trim().length > 0 ? 
cwd : process.cwd();\n const nodeModules = join(dir, \"node_modules\");\n if (!existsSync(nodeModules)) return [];\n const names: string[] = [];\n const seen = new Set<string>();\n try {\n const entries = readdirSync(nodeModules, { withFileTypes: true });\n for (const e of entries) {\n if (e.name.startsWith(\".\") || e.name === \"node\") continue;\n if (e.name.startsWith(\"@\") && e.isDirectory()) {\n const scopePath = join(nodeModules, e.name);\n const scopeEntries = readdirSync(scopePath, { withFileTypes: true });\n for (const se of scopeEntries) {\n if (!se.isDirectory()) continue;\n const name = readPackageProviderName(join(scopePath, se.name, \"package.json\"));\n if (name && !seen.has(name)) {\n seen.add(name);\n names.push(name);\n }\n }\n continue;\n }\n const name = readPackageProviderName(join(nodeModules, e.name, \"package.json\"));\n if (name && !seen.has(name)) {\n seen.add(name);\n names.push(name);\n }\n }\n } catch {\n // no node_modules or not readable\n }\n return names;\n}\n\n/**\n * Load LLM extensions by npm package name.\n * Prefers ILLMExtension (getLLMExtension() or default export); falls back to registerLLMExtension().\n * Safe to call multiple times; each package is loaded at most once.\n * @param extensionPackages npm package names; when omitted, loads none\n */\nexport async function loadLLMExtensions(\n extensionPackages?: string[]\n): Promise<void> {\n const packages = Array.isArray(extensionPackages)\n ? extensionPackages.filter((p): p is string => typeof p === \"string\" && String(p).trim().length > 0)\n : [];\n for (const pkg of packages) {\n if (loadedPackages.has(pkg)) continue;\n loadedPackages.add(pkg);\n try {\n const m = await import(/* @vite-ignore */ pkg) as Record<string, unknown>;\n const ext = getExtensionFromModule(m);\n if (ext) {\n registerExtension(ext);\n continue;\n }\n if (typeof (m as { registerLLMExtension?: () => void }).registerLLMExtension === \"function\") {\n (m as { registerLLMExtension: () => void }).registerLLMExtension();\n }\n } catch (e) {\n // extension not installed or load failed — continue with other packages (fault tolerance)\n const msg = e instanceof Error ? e.message : String(e);\n if (typeof process !== \"undefined\" && process.emitWarning) {\n process.emitWarning(`[agent-llm] Failed to load extension \"${pkg}\": ${msg}`, { code: \"AGENT_LLM_EXTENSION_LOAD\" });\n }\n }\n }\n}\n\n/**\n * Discover provider packages in node_modules and load them.\n * Equivalent to loadLLMExtensions(discoverLLMExtensions(cwd)).\n * @param cwd Directory containing node_modules; default process.cwd()\n */\nexport async function loadDiscoveredExtensions(cwd: string = process.cwd()): Promise<string[]> {\n const dir = typeof cwd === \"string\" && cwd.trim().length > 0 ? cwd : process.cwd();\n const names = discoverLLMExtensions(dir);\n await loadLLMExtensions(names);\n return names;\n}\n","/**\n * Return a LangChain-formatted LLM from config (llm.yaml or config/llm.yaml).\n * Use this LLM with LangChain's createAgent (e.g. createToolCallingAgent + AgentExecutor).\n */\nimport { join } from \"node:path\";\nimport { existsSync } from \"node:fs\";\nimport { createChatModelFromLlmConfig } from \"./llmAdapter.js\";\nimport { createChatModelFromLlmConfigWithNpm } from \"./npmProviderProtocol.js\";\nimport { loadLlmConfig } from \"./loadLlmConfig.js\";\n\nexport interface CreateAgentLlMOptions {\n /** Path to YAML config file. If omitted, uses llm.yaml in cwd or config/llm.yaml in cwd/parent. 
*/\n configPath?: string;\n /** If true (default), run npm install when provider is npm:<package> and package is not found. */\n installNpmIfMissing?: boolean;\n}\n\nfunction resolveDefaultConfigPath(): string {\n const cwd = process.cwd();\n if (existsSync(join(cwd, \"llm.yaml\"))) return join(cwd, \"llm.yaml\");\n if (existsSync(join(cwd, \"config\", \"llm.yaml\"))) return join(cwd, \"config\", \"llm.yaml\");\n const parentConfig = join(cwd, \"..\", \"config\", \"llm.yaml\");\n if (existsSync(parentConfig)) return parentConfig;\n return join(cwd, \"config\", \"llm.yaml\");\n}\n\nfunction normalizeOptions(\n configPathOrOptions?: string | CreateAgentLlMOptions\n): CreateAgentLlMOptions {\n if (configPathOrOptions == null) return {};\n if (typeof configPathOrOptions === \"string\") return { configPath: configPathOrOptions };\n return configPathOrOptions;\n}\n\nfunction normalizeCreateOptions(\n configPathOrOptions?: string | CreateAgentLlMOptions\n): CreateAgentLlMOptions {\n if (configPathOrOptions == null) return {};\n if (typeof configPathOrOptions === \"string\") return { configPath: configPathOrOptions };\n return configPathOrOptions;\n}\n\nfunction normalizeAgentLlMError(e: unknown, context: string): Error {\n if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });\n return new Error(`${context}: ${String(e)}`);\n}\n\n/**\n * Internal: create LLM from config without resolving npm: providers (sync).\n */\nfunction createAgentLlMInternal(\n configPathOrOptions?: string | CreateAgentLlMOptions\n): ReturnType<typeof createChatModelFromLlmConfig> {\n try {\n const options = normalizeOptions(configPathOrOptions);\n const configPath = options.configPath ?? resolveDefaultConfigPath();\n const llmSection = loadLlmConfig(configPath);\n if (llmSection == null) {\n throw new Error(`No LLM config at ${configPath}. Add llm.yaml or config/llm.yaml, or pass configPath.`);\n }\n return createChatModelFromLlmConfig({ llmSection });\n } catch (e) {\n if (e instanceof Error && e.message.includes(\"No LLM config\")) throw e;\n throw normalizeAgentLlMError(e, \"agent-llm: createAgentLlM failed\");\n }\n}\n\n/**\n * Create a LangChain-formatted LLM from config. Resolves npm: providers and optionally installs packages.\n * - createAgentLlM() — uses llm.yaml (cwd) or config/llm.yaml\n * - createAgentLlM(\"path/to/llm.yaml\") — use specific config file\n * - createAgentLlM({ configPath: \"...\", installNpmIfMissing: true }) — options object\n */\nexport async function createAgentLlM(\n configPathOrOptions?: string | CreateAgentLlMOptions\n): Promise<ReturnType<typeof createChatModelFromLlmConfig>> {\n try {\n const options = normalizeCreateOptions(configPathOrOptions);\n const configPath = options.configPath ?? resolveDefaultConfigPath();\n const llmSection = loadLlmConfig(configPath);\n if (llmSection == null) {\n throw new Error(`No LLM config at ${configPath}. 
Add llm.yaml or config/llm.yaml, or pass configPath.`);\n }\n return await createChatModelFromLlmConfigWithNpm({\n llmSection,\n installNpmIfMissing: options.installNpmIfMissing !== false,\n cwd: process.cwd(),\n });\n } catch (e) {\n if (e instanceof Error && e.message.includes(\"No LLM config\")) throw e;\n throw normalizeAgentLlMError(e, \"agent-llm: createAgentLlM failed\");\n }\n}\n","/**\n * Agent LLM: OpenAI-compatible format only (/v1/chat/completions etc.).\n * Multi-instance: each LLM has id and type; optional baseURL for other compatible endpoints.\n */\n\n/** LLM type: chat = conversation, image = image generation (OpenAI-compatible format) */\nexport type LLMType = \"chat\" | \"image\";\n\n/** Single LLM config: id, type, model; OpenAI-compatible API only */\nexport interface LLMConfig {\n /** Unique id for fetching the instance from the registry */\n id: string;\n /** chat | image */\n type: LLMType;\n /** Must be openai or openai-compatible; only this format is supported */\n provider: string;\n /** Model name, e.g. gpt-4o-mini, dall-e-3 */\n model?: string;\n /** Temperature etc.; commonly used for chat */\n temperature?: number;\n /** API key; can also be set via env */\n apiKey?: string;\n /** OpenAI-compatible endpoint baseURL (e.g. Azure, local proxy, other /v1-compatible vendors) */\n baseURL?: string;\n /** Other options (passed through) */\n [key: string]: unknown;\n}\n\n/** agent.yaml llm section: flat (each model keyed by name), default+instances, or single object */\nexport interface AgentConfigLlmSection {\n /** Default model name (id) to use */\n default?: string;\n /** npm package name or array for dynamic load; e.g. \"wallee-llm\" or [\"wallee-llm\"] */\n type?: string | string[];\n /** Array of LLM configs */\n instances?: LLMConfig[];\n /** Single-object form: provider, model etc., parsed as id=default chat */\n provider?: string;\n model?: string;\n name?: string;\n temperature?: number;\n apiKey?: string;\n baseURL?: string;\n base_url?: string;\n /** Flat: strong/medium/fast etc. 
name → config (provider, base_url, name, options) */\n [key: string]: unknown;\n}\n\n/** Single chat message */\nexport interface ChatMessage {\n role: \"system\" | \"user\" | \"assistant\";\n content: string;\n}\n\n/** Minimal chat result */\nexport interface ChatResult {\n content: string;\n usage?: { promptTokens?: number; completionTokens?: number };\n}\n\n/** OpenAI-compatible tool definition (function) */\nexport interface ToolDefinition {\n type: \"function\";\n function: {\n name: string;\n description?: string;\n parameters?: object;\n };\n}\n\n/** Message with tool calls (assistant may include tool_calls; tool = tool result) */\nexport type ChatWithToolsMessage =\n | ChatMessage\n | { role: \"tool\"; content: string; tool_call_id: string }\n | {\n role: \"assistant\";\n content?: string | null;\n tool_calls?: Array<{\n id: string;\n type: \"function\";\n function: { name: string; arguments: string };\n }>;\n };\n\n/** Chat result with tool calls */\nexport interface ChatWithToolsResult {\n message: {\n role: \"assistant\";\n content?: string | null;\n tool_calls?: Array<{\n id: string;\n type: \"function\";\n function: { name: string; arguments: string };\n }>;\n };\n usage?: { promptTokens?: number; completionTokens?: number };\n}\n\n/** Image generation result */\nexport interface ImageResult {\n url?: string;\n b64?: string;\n}\n\n/**\n * Minimal interface for a single LLM instance.\n * type=chat provides chat; type=image provides generateImage.\n * chat type may optionally provide chatWithTools for ReAct/Agent tool calling.\n */\nexport interface ILLMClient {\n readonly id: string;\n readonly type: LLMType;\n /** Chat (available when type=chat) */\n chat(messages: ChatMessage[]): Promise<ChatResult>;\n /**\n * Chat with tools (optional when type=chat; for ReAct/Agent).\n * If not implemented, caller may poll with chat or use another client.\n */\n chatWithTools?(\n messages: ChatWithToolsMessage[],\n tools: ToolDefinition[],\n options?: { timeoutMs?: number }\n ): Promise<ChatWithToolsResult>;\n /** Image generation (when type=image); otherwise may throw or be ignored */\n generateImage?(options: { prompt: string; size?: string; n?: number }): Promise<ImageResult>;\n}\n\n/**\n * LLM registry created from llm section: get instance by id.\n */\nexport interface ILLMRegistry {\n get(id: string): ILLMClient | undefined;\n defaultId(): string | undefined;\n ids(): string[];\n}\n\n/**\n * Generic interface for an LLM provider extension (npm package).\n * Implement getLLMExtension() returning this; framework will register the provider when loading.\n * Prefer createClient + createChatModel so the framework does registration (extension need not call register).\n */\nexport interface ILLMExtension {\n /** Provider name (e.g. \"cis\"). Framework registers under this name. */\n readonly providerName: string;\n /** ILLMClient factory. If present with createChatModel, framework calls registerProvider(providerName, createClient). */\n readonly createClient?: (config: LLMConfig) => ILLMClient;\n /** LangChain ChatModel factory. If present with createClient, framework calls registerChatModelProvider(providerName, createChatModel). */\n readonly createChatModel?: (config: LLMConfig) => unknown;\n /** Legacy: if extension provides register(), framework calls it. Prefer createClient + createChatModel so framework registers. */\n register?(): void;\n}\n\n/** package.json field: when true, the package is an agent-llm provider (for discovery). 
*/\nexport const AGENT_LLM_PROVIDER_FIELD = \"agentLlmProvider\";\n"],"mappings":";AAOA,IAAM,iBAAiB;AAEvB,IAAM,gBAAgB,oBAAI,IAAI;AAAA,EAC5B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAKM,SAAS,gBAAgB,SAA+D;AAC7F,MAAI,WAAW,QAAQ,OAAO,YAAY,UAAU;AAClD,WAAO,EAAE,WAAW,gBAAgB,SAAS,CAAC,EAAE;AAAA,EAClD;AAEA,MAAI,MAAM,QAAQ,OAAO,GAAG;AAC1B,UAAM,UAAU,QACb,OAAO,CAAC,MAAoC,KAAK,QAAQ,OAAO,MAAM,QAAQ,EAC9E,IAAI,CAAC,MAAM,MAAM,mBAAmB,EAAE,GAAG,MAAM,IAAI,KAAK,MAAM,KAAK,QAAQ,OAAO,CAAC,EAAE,CAAC,CAAC,EACvF,OAAO,CAAC,MAAsB,KAAK,IAAI;AAC1C,UAAM,YAAY,QAAQ,SAAS,IAAI,QAAQ,CAAC,EAAG,KAAK;AACxD,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,QAAM,IAAI;AAEV,QAAM,cAAc,OAAO,QAAQ,CAAC,EAAE;AAAA,IACpC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,cAAc,IAAI,CAAC,KAAK,KAAK,QAAQ,OAAO,MAAM,YAAY,CAAC,MAAM,QAAQ,CAAC;AAAA,EAC7F;AACA,MAAI,YAAY,SAAS,GAAG;AAC1B,UAAM,UAAuB,CAAC;AAC9B,eAAW,CAAC,IAAI,KAAK,KAAK,aAAa;AACrC,YAAM,IAAI,iBAAiB,IAAI,KAAgC;AAC/D,UAAI,EAAG,SAAQ,KAAK,CAAC;AAAA,IACvB;AACA,UAAM,YACJ,OAAO,EAAE,YAAY,YAAY,EAAE,WAAW,YAAY,KAAK,CAAC,CAAC,CAAC,MAAM,MAAM,EAAE,OAAO,IACnF,EAAE,UACF,QAAQ,SAAS,IACf,QAAQ,CAAC,EAAG,KACZ;AACR,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,MAAI,MAAM,QAAQ,EAAE,SAAS,GAAG;AAC9B,UAAM,UAAW,EAAE,UAChB,OAAO,CAAC,MAAoC,KAAK,QAAQ,OAAO,MAAM,QAAQ,EAC9E,IAAI,CAAC,MAAM,mBAAmB,CAAC,CAAC,EAChC,OAAO,CAAC,MAAsB,KAAK,IAAI;AAC1C,UAAM,YACJ,OAAO,EAAE,YAAY,YAAY,EAAE,UAC/B,EAAE,UACF,QAAQ,SAAS,IACf,QAAQ,CAAC,EAAG,KACZ;AACR,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,MAAI,OAAO,EAAE,aAAa,YAAY,OAAO,EAAE,UAAU,YAAY,OAAQ,EAAwB,SAAS,UAAU;AACtH,UAAM,MAAM,wBAAwB,CAAC;AACrC,WAAO,EAAE,WAAW,IAAI,IAAI,SAAS,CAAC,GAAG,EAAE;AAAA,EAC7C;AAEA,SAAO,EAAE,WAAW,gBAAgB,SAAS,CAAC,EAAE;AAClD;AAEA,IAAM,wBAAwB,CAAC,cAAc,UAAU,aAAa,aAAa,cAAc,QAAQ,eAAe,aAAa,SAAS;AAE5I,SAAS,iBAAiB,IAAY,OAAkD;AACtF,QAAM,OAAO,MAAM;AACnB,QAAM,UACJ,OAAO,MAAM,aAAa,WACtB,MAAM,WACN,OAAO,MAAM,YAAY,WACvB,MAAM,UACN;AACR,QAAM,QAAQ,OAAO,MAAM,SAAS,WAAW,MAAM,OAAO,OAAO,MAAM,UAAU,WAAW,MAAM,QAAQ;AAC5G,QAAM,WAAW,OAAO,MAAM,aAAa,YAAY,MAAM,WAAW,MAAM,WAAW;AACzF,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,aAAa,OAAO,MAAM,gBAAgB,WAAW,KAAK,cAAc,OAAO,MAAM,gBAAgB,WAAW,MAAM,cAAc;AAAA,IACpI,QAAQ,OAAO,MAAM,WAAW,WAAW,KAAK,SAAS,OAAO,MAAM,WAAW,WAAW,MAAM,SAAS;AAAA,IAC3G;AAAA,EACF;AACA,MAAI,OAAO,MAAM,SAAS,YAAY,MAAM,SAAS,QAAS,QAAO,OAAO;AAC5E,MAAI,QAAQ,OAAO,SAAS,SAAU,CAAC,OAAmC,UAAU;AACpF,aAAW,KAAK,uBAAuB;AACrC,QAAI,MAAM,CAAC,MAAM,OAAW,CAAC,OAAmC,CAAC,IAAI,MAAM,CAAC;AAAA,aACnE,QAAQ,KAAK,CAAC,MAAM,OAAW,CAAC,OAAmC,CAAC,IAAI,KAAK,CAAC;AAAA,EACzF;AACA,SAAO;AACT;AAEA,SAAS,wBAAwB,GAAuC;AACtE,QAAM,MAAiB;AAAA,IACrB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,UAAU,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,IACxD,OAAO,OAAO,EAAE,UAAU,WAAW,EAAE,QAAS,OAAQ,EAAwB,SAAS,WAAY,EAAuB,OAAO;AAAA,IACnI,aAAa,OAAO,EAAE,gBAAgB,WAAW,EAAE,cAAc;AAAA,IACjE,QAAQ,OAAO,EAAE,WAAW,WAAW,EAAE,SAAS;AAAA,IAClD,SACE,OAAO,EAAE,YAAY,WAAW,EAAE,UAAU,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,EAC9F;AACA,SAAO,KAAK,CAAC,EAAE,QAAQ,CAAC,MAAM;AAC5B,QAAI,CAAC,CAAC,MAAM,QAAQ,YAAY,SAAS,QAAQ,eAAe,UAAU,WAAW,YAAY,WAAW,WAAW,EAAE,SAAS,CAAC,GAAG;AACpI,MAAC,IAAgC,CAAC,IAAI,EAAE,CAAC;AAAA,IAC3C;AAAA,EACF,CAAC;AACD,SAAO;AACT;AAEA,SAAS,mBAAmB,GAA8C;AACxE,QAAM,KAAK,OAAO,EAAE,OAAO,YAAY,EAAE,KAAK,EAAE,KAAK;AACrD,QAAM,OAAO,EAAE,SAAS,UAAU,UAAU;AAC5C,QAAM,WAAW,OAAO,EAAE,aAAa,YAAY,EAAE,WAAW,EAAE,WAAW;AAC7E,QAAM,OAAO,EAAE;AACf,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,OAAO,EAAE,UAAU,WAAW,EAAE,QAAS,OAAO,EAAE,SAAS,WAAW,EAAE,OAAO;AAAA,IACtF,aACE,OAAO,EAAE,gBAAgB,WACrB,EAAE,cACF,OAAO,MAAM,gBAAgB,WAC3B,KAAK,cACL;AAAA,IACR,QACE,OAAO,EAAE,WAAW,WAChB,EAA
E,SACF,OAAO,MAAM,WAAW,WACtB,KAAK,SACL;AAAA,IACR,SAAS,OAAO,EAAE,YAAY,WAAW,EAAE,UAAW,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,EACtG;AACA,SAAO,KAAK,CAAC,EAAE,QAAQ,CAAC,MAAM;AAC5B,QAAI,CAAC,CAAC,MAAM,QAAQ,YAAY,SAAS,QAAQ,eAAe,UAAU,WAAW,UAAU,EAAE,SAAS,CAAC,GAAG;AAC5G,MAAC,OAAmC,CAAC,IAAI,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF,CAAC;AACD,SAAO;AACT;;;AC5JA,SAAS,cAAc,kBAAkB;AACzC,SAAS,SAAS,iBAAiB;AAU5B,SAAS,cAAc,KAAuB;AACnD,MAAI,QAAQ,QAAQ,QAAQ,OAAW,QAAO;AAC9C,MAAI,OAAO,QAAQ,UAAU;AAC3B,UAAM,IAAI,IAAI,MAAM,eAAe;AACnC,WAAO,IAAK,QAAQ,IAAI,EAAE,CAAC,CAAC,KAAK,MAAO;AAAA,EAC1C;AACA,MAAI,MAAM,QAAQ,GAAG,EAAG,QAAO,IAAI,IAAI,aAAa;AACpD,MAAI,OAAO,QAAQ,UAAU;AAC3B,UAAM,MAA+B,CAAC;AACtC,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,GAAG,EAAG,KAAI,CAAC,IAAI,cAAc,CAAC;AAClE,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAMO,SAAS,aACd,SACA,UAAgC,CAAC,GACxB;AACT,QAAM,EAAE,eAAe,QAAQ,KAAK,IAAI;AACxC,QAAM,SAAS,UAAU,OAAO;AAChC,QAAM,MAAM,QAAQ;AACpB,MAAI,OAAO,KAAM,QAAO;AACxB,SAAO,QAAQ,cAAc,GAAG,IAAI;AACtC;AAQO,SAAS,cACd,UACA,UAAgC,CAAC,GACjB;AAChB,MAAI,OAAO,aAAa,YAAY,SAAS,KAAK,EAAE,WAAW,GAAG;AAChE,UAAM,IAAI,MAAM,yDAAyD;AAAA,EAC3E;AACA,MAAI,CAAC,WAAW,QAAQ,EAAG,QAAO;AAClC,MAAI;AACJ,MAAI;AACF,UAAM,aAAa,UAAU,MAAM;AAAA,EACrC,SAAS,GAAG;AACV,UAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACrD,UAAM,IAAI,MAAM,yCAAyC,QAAQ,KAAK,GAAG,IAAI,EAAE,OAAO,EAAE,CAAC;AAAA,EAC3F;AACA,MAAI;AACF,UAAM,MAAM,aAAa,KAAK,OAAO;AACrC,WAAO,OAAO;AAAA,EAChB,SAAS,GAAG;AACV,UAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACrD,UAAM,IAAI,MAAM,0CAA0C,QAAQ,KAAK,GAAG,IAAI,EAAE,OAAO,EAAE,CAAC;AAAA,EAC5F;AACF;;;ACrEA,OAAO,YAAY;AAYnB,SAAS,UAAU,QAA2B;AAC5C,QAAM,MAAM,OAAO,UAAU,QAAQ,IAAI,kBAAkB;AAC3D,MAAI,CAAC,IAAK,OAAM,IAAI,MAAM,qEAAqE;AAC/F,SAAO;AACT;AAEA,SAAS,0BAA0B,QAAyD;AAC1F,QAAM,OAA6C,EAAE,QAAQ,UAAU,MAAM,EAAE;AAC/E,MAAI,OAAO,OAAO,YAAY,YAAY,OAAO,QAAS,MAAK,UAAU,OAAO;AAChF,SAAO;AACT;AAEA,SAAS,iBACP,GACoD;AACpD,MAAI,EAAE,SAAS;AACb,WAAO,EAAE,MAAM,QAAQ,SAAS,EAAE,SAAS,cAAc,EAAE,aAAa;AAC1E,MAAI,EAAE,SAAS,eAAe,gBAAgB,KAAK,EAAE,YAAY,QAAQ;AACvE,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS,EAAE,WAAW;AAAA,MACtB,YAAY,EAAE,WAAW,IAAI,CAAC,QAAQ;AAAA,QACpC,IAAI,GAAG;AAAA,QACP,MAAM;AAAA,QACN,UAAU,EAAE,MAAM,GAAG,SAAS,MAAM,WAAW,GAAG,SAAS,UAAU;AAAA,MACvE,EAAE;AAAA,IACJ;AAAA,EACF;AACA,SAAO,EAAE,MAAM,EAAE,MAAM,SAAU,EAAkB,QAAQ;AAC7D;AAEO,SAAS,uBAAuB,QAA+B;AACpE,QAAM,SAAS,IAAI,OAAO,0BAA0B,MAAM,CAAC;AAC3D,QAAM,QAAQ,OAAO,SAAS,QAAQ,IAAI,gBAAgB;AAC1D,QAAM,cAAc,OAAO,eAAe;AAE1C,SAAO;AAAA,IACL,IAAI,OAAO;AAAA,IACX,MAAM;AAAA,IACN,MAAM,KAAK,UAA8C;AACvD,YAAM,OAAO,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,QAChD;AAAA,QACA;AAAA,QACA,UAAU,SAAS,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,EAAE;AAAA,MACtE,CAAC;AACD,YAAM,UAAU,KAAK,QAAQ,CAAC,GAAG,SAAS,WAAW;AACrD,YAAM,QAAQ,KAAK,QACf,EAAE,cAAc,KAAK,MAAM,eAAe,kBAAkB,KAAK,MAAM,kBAAkB,IACzF;AACJ,aAAO,EAAE,SAAS,MAAM;AAAA,IAC1B;AAAA,IACA,MAAM,cACJ,UACA,OACA,UAC8B;AAC9B,YAAM,OAAO,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,QAChD;AAAA,QACA;AAAA,QACA,UAAU,SAAS,IAAI,gBAAgB;AAAA,QACvC,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,UACvB,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,EAAE,SAAS;AAAA,YACjB,aAAa,EAAE,SAAS;AAAA,YACxB,YAAa,EAAE,SAAS,cAAc;AAAA,UACxC;AAAA,QACF,EAAE;AAAA,MACJ,CAAC;AACD,YAAM,MAAM,KAAK,QAAQ,CAAC,GAAG;AAC7B,YAAM,QAAQ,KAAK,QACf,EAAE,cAAc,KAAK,MAAM,eAAe,kBAAkB,KAAK,MAAM,kBAAkB,IACzF;AACJ,aAAO;AAAA,QACL,SAAS;AAAA,UACP,MAAM;AAAA,UACN,SAAS,KAAK,WAAW;AAAA,UACzB,YAAY,KAAK,YAAY,IAAI,CAAC,QAAQ;AAAA,YACxC,IAAI,GAAG;AAAA,YACP,MAAM;AAAA,YACN,UAAU;AAAA,cACR,MAAM,GAAG,UAAU,QAAQ;AAAA,cAC3B,WAAW,GAAG,UAAU,aAAa;AAAA,YACvC;AAAA,UACF,EAAE;AAAA,QACJ;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,wBAAwB,QAA+B;AACrE,QAAM,SAAS,IAAI,OAAO,0BAA0B,MAAM,CAAC;AAC3D,QAAM,QAAS,OAAO,S
AAoB;AAE1C,SAAO;AAAA,IACL,IAAI,OAAO;AAAA,IACX,MAAM;AAAA,IACN,MAAM,OAA4B;AAChC,YAAM,IAAI,MAAM,+DAA+D;AAAA,IACjF;AAAA,IACA,MAAM,cAAc,SAA8E;AAChG,YAAM,OAAO,MAAM,OAAO,OAAO,SAAS;AAAA,QACxC;AAAA,QACA,QAAQ,QAAQ;AAAA,QAChB,MAAO,QAAQ,QAAoD;AAAA,QACnE,GAAG,QAAQ,KAAK;AAAA,QAChB,iBAAiB;AAAA,MACnB,CAAC;AACD,YAAM,MAAM,KAAK,OAAO,CAAC,GAAG,OAAO;AACnC,aAAO,EAAE,IAAI;AAAA,IACf;AAAA,EACF;AACF;AAEO,SAAS,mBAAmB,QAA+B;AAChE,MAAI,OAAO,SAAS,QAAS,QAAO,wBAAwB,MAAM;AAClE,SAAO,uBAAuB,MAAM;AACtC;;;ACjIA,IAAM,oBAAoB;AAE1B,SAAS,mBAAmB,QAA+B;AACzD,SAAO,mBAAmB,MAAM;AAClC;AAEA,IAAM,YAA+D;AAAA,EACnE,QAAQ;AAAA,EACR,CAAC,iBAAiB,GAAG;AACvB;AAEO,SAAS,aAAa,QAA+B;AAC1D,QAAM,KAAK,OAAO,YAAY,IAAI,YAAY;AAC9C,QAAM,KAAK,UAAU,CAAC;AACtB,MAAI,CAAC,IAAI;AACP,UAAM,YAAY,CAAC,GAAG,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,SAAS,GAAG,qBAAqB,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,IAAI;AACnG,UAAM,IAAI;AAAA,MACR,6BAA6B,OAAO,QAAQ,gBAAgB,SAAS;AAAA,IACvE;AAAA,EACF;AACA,SAAO,GAAG,MAAM;AAClB;AAEO,SAAS,iBAAiB,MAAc,SAAkD;AAC/F,YAAU,KAAK,YAAY,CAAC,IAAI;AAClC;;;AChBO,SAAS,kBAAkB,SAAiD;AACjF,QAAM,EAAE,WAAW,QAAQ,IAAI,gBAAgB,QAAQ,UAAU;AACjE,QAAM,MAAM,oBAAI,IAAwB;AAExC,aAAW,UAAU,SAAS;AAC5B,QAAI;AACF,YAAM,SAAS,aAAa,MAAM;AAClC,UAAI,IAAI,OAAO,IAAI,MAAM;AAAA,IAC3B,SAAS,KAAK;AACZ,cAAQ,KAAK,yBAAyB,OAAO,EAAE,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,IACzG;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,IAAoC;AACtC,aAAO,IAAI,IAAI,EAAE;AAAA,IACnB;AAAA,IACA,YAAgC;AAC9B,UAAI,IAAI,IAAI,SAAS,EAAG,QAAO;AAC/B,aAAO,IAAI,OAAO,IAAI,CAAC,GAAG,IAAI,KAAK,CAAC,EAAE,CAAC,IAAI;AAAA,IAC7C;AAAA,IACA,MAAgB;AACd,aAAO,CAAC,GAAG,IAAI,KAAK,CAAC;AAAA,IACvB;AAAA,EACF;AACF;;;AC/BA,IAAM,uBAAuB,oBAAI,IAA8B;AAMxD,SAAS,0BAA0B,cAAsB,SAAiC;AAC/F,uBAAqB,IAAI,aAAa,YAAY,GAAG,OAAO;AAC9D;AAKO,SAAS,oBAAoB,cAAoD;AACtF,SAAO,qBAAqB,IAAI,aAAa,YAAY,CAAC;AAC5D;;;AClBA,SAAS,kBAAkB;AAK3B,IAAM,gBAAgB;AAWtB,SAAS,uBAAuB,GAAY,SAAwB;AAClE,MAAI,aAAa,MAAO,QAAO,IAAI,MAAM,GAAG,OAAO,KAAK,EAAE,OAAO,IAAI,EAAE,OAAO,EAAE,CAAC;AACjF,SAAO,IAAI,MAAM,GAAG,OAAO,KAAK,OAAO,CAAC,CAAC,EAAE;AAC7C;AAMO,SAAS,6BACd,SACe;AACf,QAAM,EAAE,YAAY,UAAU,UAAU,IAAI;AAC5C,MAAI;AACJ,MAAI;AACJ,MAAI;AACF,UAAM,SAAS,gBAAgB,cAAc,IAAI;AACjD,gBAAY,OAAO;AACnB,cAAU,OAAO;AAAA,EACnB,SAAS,GAAG;AACV,UAAM,uBAAuB,GAAG,wCAAwC;AAAA,EAC1E;AACA,QAAM,gBAAgB,QAAQ,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,KAAK,QAAQ,CAAC;AAE1E,MAAI,CAAC,eAAe;AAClB,UAAMA,SACJ,YAAY,QAAQ,IAAI,gBAAgB;AAC1C,UAAMC,UAAS,aAAa,QAAQ,IAAI;AACxC,QAAI;AACF,aAAO,IAAI,WAAW;AAAA,QACpB,OAAAD;AAAA,QACA,aAAa;AAAA,QACb,GAAIC,UAAS,EAAE,QAAAA,QAAO,IAAI,CAAC;AAAA,MAC7B,CAAC;AAAA,IACH,SAAS,GAAG;AACV,YAAM,uBAAuB,GAAG,gDAAgD;AAAA,IAClF;AAAA,EACF;AAEA,QAAM,WAAY,cAAwC,YAAY;AACtE,QAAM,mBAAmB,oBAAoB,QAAQ;AACrD,MAAI,kBAAkB;AACpB,UAAM,SAAS;AAAA,MACb,GAAG;AAAA,MACH,OAAO,YAAY,cAAc;AAAA,MACjC,aACE,OAAO,cAAc,gBAAgB,WACjC,cAAc,cACd;AAAA,IACR;AACA,QAAI;AACF,aAAO,iBAAiB,MAAM;AAAA,IAChC,SAAS,GAAG;AACV,YAAM,uBAAuB,GAAG,uDAAuD,QAAQ,GAAG;AAAA,IACpG;AAAA,EACF;AAEA,QAAM,QACJ,YACA,eAAe,SACf,QAAQ,IAAI,gBACZ;AAEF,MAAI,SACF,aAAa,eAAe,UAAU,QAAQ,IAAI;AACpD,MAAI,UAAU,eAAe;AAE7B,MAAI,WAAW,CAAC,QAAQ,QAAQ,OAAO,EAAE,EAAE,SAAS,KAAK,GAAG;AAC1D,cAAU,QAAQ,QAAQ,OAAO,EAAE,IAAI;AAAA,EACzC;AAEA,MAAI,WAAW,WAAW,QAAW;AACnC,aAAS;AAAA,EACX;AAEA,QAAM,cACJ,OAAO,eAAe,gBAAgB,WAAW,cAAc,cAAc;AAE/E,QAAM,qBAAkE;AAAA,IACtE;AAAA,IACA;AAAA,IACA,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC3B,GAAI,UAAU,EAAE,eAAe,EAAE,QAAQ,EAAE,IAAI,CAAC;AAAA,EAClD;AAEA,MAAI;AACF,WAAO,IAAI,WAAW,kBAAkB;AAAA,EAC1C,SAAS,GAAG;AACV,UAAM,uBAAuB,GAAG,oDAAoD;AAAA,EACtF;AACF;;;AC3GA,SAAS,gBAAgB;;;ACDzB,SAAS,aAAa,gBAAAC,eAAc,cAAAC,mBAAkB;AACtD,SAAS,YAAY;AAMrB,IAAM,iBAAiB,oBAAI,IAAY;AAEvC,SAAS,eAAe,GAAgC;AACtD,MAAI,KAAK,QAAQ,OAAO,MAAM,SAAU,QA
AO;AAC/C,QAAM,IAAI;AACV,MAAI,OAAO,EAAE,iBAAiB,SAAU,QAAO;AAC/C,QAAM,cAAc,OAAO,EAAE,aAAa;AAC1C,QAAM,eAAe,OAAO,EAAE,iBAAiB,cAAc,OAAO,EAAE,oBAAoB;AAC1F,SAAO,eAAe;AACxB;AAEA,SAAS,kBAAkB,KAA0B;AACnD,MAAI,OAAO,IAAI,iBAAiB,cAAc,OAAO,IAAI,oBAAoB,YAAY;AACvF,qBAAiB,IAAI,cAAc,IAAI,YAAY;AACnD,8BAA0B,IAAI,cAAc,IAAI,eAAmC;AACnF;AAAA,EACF;AACA,MAAI,OAAO,IAAI,aAAa,YAAY;AACtC,QAAI,SAAS;AAAA,EACf;AACF;AAEA,SAAS,uBAAuB,GAAkD;AAChF,MAAI,eAAe,CAAC,EAAG,QAAO;AAC9B,MAAI,OAAO,EAAE,oBAAoB,YAAY;AAC3C,UAAM,MAAM,EAAE,gBAAgB;AAC9B,WAAO,eAAe,GAAG,IAAI,MAAM;AAAA,EACrC;AACA,MAAI,eAAe,EAAE,OAAO,EAAG,QAAO,EAAE;AACxC,SAAO;AACT;AAKO,SAAS,4BAA4B,OAAqC;AAC/E,MAAI,SAAS,KAAM,QAAO,CAAC;AAC3B,QAAM,WAAW,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,KAAK;AACtD,SAAO,SAAS;AAAA,IACd,CAAC,MAAmB,OAAO,MAAM,YAAY,OAAO,CAAC,EAAE,KAAK,EAAE,SAAS;AAAA,EACzE;AACF;AAQA,SAAS,wBAAwB,SAAgC;AAC/D,MAAI,CAACC,YAAW,OAAO,EAAG,QAAO;AACjC,MAAI;AACF,UAAM,MAAMC,cAAa,SAAS,OAAO;AACzC,UAAM,MAAM,KAAK,MAAM,GAAG;AAC1B,UAAM,WACJ,IAAI,qBAAqB,QACxB,MAAM,QAAQ,IAAI,QAAQ,KAAK,IAAI,SAAS,SAAS,oBAAoB;AAC5E,WAAO,YAAY,OAAO,IAAI,SAAS,WAAW,IAAI,OAAO;AAAA,EAC/D,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,SAAS,sBAAsB,MAAc,QAAQ,IAAI,GAAa;AAC3E,QAAM,MAAM,OAAO,QAAQ,YAAY,IAAI,KAAK,EAAE,SAAS,IAAI,MAAM,QAAQ,IAAI;AACjF,QAAM,cAAc,KAAK,KAAK,cAAc;AAC5C,MAAI,CAACD,YAAW,WAAW,EAAG,QAAO,CAAC;AACtC,QAAM,QAAkB,CAAC;AACzB,QAAM,OAAO,oBAAI,IAAY;AAC7B,MAAI;AACF,UAAM,UAAU,YAAY,aAAa,EAAE,eAAe,KAAK,CAAC;AAChE,eAAW,KAAK,SAAS;AACvB,UAAI,EAAE,KAAK,WAAW,GAAG,KAAK,EAAE,SAAS,OAAQ;AACjD,UAAI,EAAE,KAAK,WAAW,GAAG,KAAK,EAAE,YAAY,GAAG;AAC7C,cAAM,YAAY,KAAK,aAAa,EAAE,IAAI;AAC1C,cAAM,eAAe,YAAY,WAAW,EAAE,eAAe,KAAK,CAAC;AACnE,mBAAW,MAAM,cAAc;AAC7B,cAAI,CAAC,GAAG,YAAY,EAAG;AACvB,gBAAME,QAAO,wBAAwB,KAAK,WAAW,GAAG,MAAM,cAAc,CAAC;AAC7E,cAAIA,SAAQ,CAAC,KAAK,IAAIA,KAAI,GAAG;AAC3B,iBAAK,IAAIA,KAAI;AACb,kBAAM,KAAKA,KAAI;AAAA,UACjB;AAAA,QACF;AACA;AAAA,MACF;AACA,YAAM,OAAO,wBAAwB,KAAK,aAAa,EAAE,MAAM,cAAc,CAAC;AAC9E,UAAI,QAAQ,CAAC,KAAK,IAAI,IAAI,GAAG;AAC3B,aAAK,IAAI,IAAI;AACb,cAAM,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AACA,SAAO;AACT;AAQA,eAAsB,kBACpB,mBACe;AACf,QAAM,WAAW,MAAM,QAAQ,iBAAiB,IAC5C,kBAAkB,OAAO,CAAC,MAAmB,OAAO,MAAM,YAAY,OAAO,CAAC,EAAE,KAAK,EAAE,SAAS,CAAC,IACjG,CAAC;AACL,aAAW,OAAO,UAAU;AAC1B,QAAI,eAAe,IAAI,GAAG,EAAG;AAC7B,mBAAe,IAAI,GAAG;AACtB,QAAI;AACF,YAAM,IAAI,MAAM;AAAA;AAAA,QAA0B;AAAA;AAC1C,YAAM,MAAM,uBAAuB,CAAC;AACpC,UAAI,KAAK;AACP,0BAAkB,GAAG;AACrB;AAAA,MACF;AACA,UAAI,OAAQ,EAA4C,yBAAyB,YAAY;AAC3F,QAAC,EAA2C,qBAAqB;AAAA,MACnE;AAAA,IACF,SAAS,GAAG;AAEV,YAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACrD,UAAI,OAAO,YAAY,eAAe,QAAQ,aAAa;AACzD,gBAAQ,YAAY,yCAAyC,GAAG,MAAM,GAAG,IAAI,EAAE,MAAM,2BAA2B,CAAC;AAAA,MACnH;AAAA,IACF;AAAA,EACF;AACF;AAOA,eAAsB,yBAAyB,MAAc,QAAQ,IAAI,GAAsB;AAC7F,QAAM,MAAM,OAAO,QAAQ,YAAY,IAAI,KAAK,EAAE,SAAS,IAAI,MAAM,QAAQ,IAAI;AACjF,QAAM,QAAQ,sBAAsB,GAAG;AACvC,QAAM,kBAAkB,KAAK;AAC7B,SAAO;AACT;;;AD/IO,IAAM,sBAAsB;AAG5B,SAAS,qBAAqB,MAAmF;AACtH,MAAI,OAAO,SAAS,YAAY,CAAC,KAAK,WAAW,mBAAmB,EAAG,QAAO;AAC9E,QAAM,OAAO,KAAK,MAAM,oBAAoB,MAAM;AAClD,QAAM,UAAU,KAAK,QAAQ,GAAG;AAChC,QAAM,aAAa,WAAW,IAAI,KAAK,MAAM,GAAG,OAAO,EAAE,KAAK,IAAI,KAAK,KAAK;AAC5E,QAAM,mBAAmB,WAAW,IAAI,KAAK,MAAM,UAAU,CAAC,EAAE,KAAK,IAAI;AAEzE,QAAM,eAAe,WAAW,MAAM,WAAW;AACjD,QAAM,cAAc,eAAe,WAAW,MAAM,GAAG,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI;AACzF,QAAM,UAAU,eAAe,aAAa,CAAC,IAAI;AACjD,MAAI,CAAC,YAAa,QAAO;AACzB,SAAO,EAAE,aAAa,SAAS,UAAU,oBAAoB,OAAU;AACzE;AAYO,SAAS,kBAAkB,MAA+B;AAC/D,SAAO,OAAO,SAAS,YAAY,KAAK,WAAW,mBAAmB;AACxE;AAEA,SAAS,sBAAsB,KAAuB;AACpD,QAAM,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC3D,SACE,IAAI,SAAS,oBAAoB,KACjC,IAAI,SAAS,qBAAqB,KAClC,IAAI,SAAS,kBAAkB,
KAC/B,IAAI,SAAS,sBAAsB;AAEvC;AAMA,eAAsB,0BACpB,aACA,UAA4C,CAAC,GAC9B;AACf,MAAI,OAAO,gBAAgB,YAAY,YAAY,KAAK,EAAE,WAAW,GAAG;AACtE,UAAM,IAAI,MAAM,wEAAwE;AAAA,EAC1F;AACA,QAAM,MAAM,QAAQ,OAAO,QAAQ,IAAI;AACvC,QAAM,UAAU,QAAQ;AACxB,MAAI;AACF,UAAM;AAAA;AAAA,MAA0B;AAAA;AAChC;AAAA,EACF,SAAS,KAAK;AACZ,QAAI,CAAC,sBAAsB,GAAG,EAAG,OAAM;AAAA,EACzC;AACA,QAAM,cAAc,UAAU,GAAG,WAAW,IAAI,OAAO,KAAK;AAC5D,MAAI;AACF,aAAS,eAAe,WAAW,IAAI;AAAA,MACrC;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,CAAC;AAAA,EACH,SAAS,GAAG;AACV,UAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACrD,UAAM,IAAI,MAAM,qCAAqC,WAAW,KAAK,GAAG,IAAI,EAAE,OAAO,EAAE,CAAC;AAAA,EAC1F;AACF;AAkBA,SAASC,wBAAuB,GAAY,SAAwB;AAClE,MAAI,aAAa,OAAO;AACtB,WAAO,IAAI,MAAM,GAAG,OAAO,KAAK,EAAE,OAAO,IAAI,EAAE,OAAO,EAAE,CAAC;AAAA,EAC3D;AACA,SAAO,IAAI,MAAM,GAAG,OAAO,KAAK,OAAO,CAAC,CAAC,EAAE;AAC7C;AAEA,eAAsB,mBACpB,MACA,UAAqC,CAAC,GACd;AACxB,MAAI,OAAO,SAAS,YAAY,KAAK,KAAK,EAAE,WAAW,EAAG,QAAO;AACjE,QAAM,SAAS,qBAAqB,IAAI;AACxC,MAAI,CAAC,OAAQ,QAAO;AACpB,QAAM,EAAE,aAAa,SAAS,UAAU,iBAAiB,IAAI;AAE7D,QAAM,sBAAsB,QAAQ,wBAAwB;AAC5D,QAAM,MAAM,QAAQ,OAAO,QAAQ,IAAI;AAEvC,QAAM,OAAO,YAA2B;AACtC,UAAM,kBAAkB,CAAC,WAAW,CAAC;AAAA,EACvC;AAEA,MAAI;AACF,UAAM,KAAK;AAAA,EACb,SAAS,KAAK;AACZ,QAAI,uBAAuB,sBAAsB,GAAG,GAAG;AACrD,UAAI;AACF,cAAM,0BAA0B,aAAa,EAAE,SAAS,IAAI,CAAC;AAC7D,cAAM,KAAK;AAAA,MACb,SAAS,YAAY;AACnB,cAAMA,wBAAuB,YAAY,sDAAsD,WAAW,GAAG;AAAA,MAC/G;AAAA,IACF,OAAO;AACL,YAAMA,wBAAuB,KAAK,2CAA2C,WAAW,GAAG;AAAA,IAC7F;AAAA,EACF;AAEA,MAAI,iBAAkB,QAAO;AAE7B,MAAI;AACF,UAAM,IAAI,MAAM;AAAA;AAAA,MAA0B;AAAA;AAC1C,QACE,OAAQ,EAAgD,2BAA2B,YACnF;AACA,aAAQ,EAA+C,uBAAuB;AAAA,IAChF;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR,iBAAiB,IAAI,qGAAqG,WAAW;AAAA,EACvI;AACF;AAQA,eAAsB,yBACpB,YACA,UAA2C,CAAC,GAC1B;AAClB,MAAI,cAAc,KAAM,QAAO;AAC/B,MAAI,MAAM,QAAQ,UAAU,GAAG;AAC7B,UAAM,MAAiB,CAAC;AACxB,aAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,UAAI;AACF,YAAI,KAAK,MAAM,yBAAyB,WAAW,CAAC,GAAG,OAAO,CAAC;AAAA,MACjE,SAAS,GAAG;AACV,cAAMA,wBAAuB,GAAG,qDAAqD,CAAC,EAAE;AAAA,MAC1F;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACA,MAAI,OAAO,eAAe,UAAU;AAClC,UAAM,MAA+B,CAAC;AACtC,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,UAAU,GAAG;AAC/C,UAAI,MAAM,cAAc,kBAAkB,CAAC,GAAG;AAC5C,YAAI;AACF,gBAAM,WAAW,MAAM,mBAAmB,GAAa,OAAO;AAC9D,cAAI,CAAC,IAAI,YAAY;AAAA,QACvB,SAAS,GAAG;AACV,gBAAMA,wBAAuB,GAAG,0CAA0C,OAAO,CAAC,CAAC,GAAG;AAAA,QACxF;AACA;AAAA,MACF;AACA,UAAI;AACF,YAAI,CAAC,IAAI,MAAM,yBAAyB,GAAG,OAAO;AAAA,MACpD,SAAS,GAAG;AACV,cAAMA,wBAAuB,GAAG,iDAAiD,CAAC,GAAG;AAAA,MACvF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAWA,eAAsB,oCACpB,SACwB;AACxB,MAAI;AACF,UAAM,EAAE,qBAAqB,KAAK,GAAG,KAAK,IAAI;AAC9C,UAAM,kBAAkB,MAAM,yBAAyB,QAAQ,cAAc,MAAM;AAAA,MACjF;AAAA,MACA;AAAA,IACF,CAAC;AACD,WAAO,6BAA6B;AAAA,MAClC,GAAG;AAAA,MACH,YAAY;AAAA,IACd,CAAC;AAAA,EACH,SAAS,GAAG;AACV,UAAMA,wBAAuB,GAAG,uDAAuD;AAAA,EACzF;AACF;;;AE9NA,SAAS,QAAAC,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAY3B,SAAS,2BAAmC;AAC1C,QAAM,MAAM,QAAQ,IAAI;AACxB,MAAIC,YAAWC,MAAK,KAAK,UAAU,CAAC,EAAG,QAAOA,MAAK,KAAK,UAAU;AAClE,MAAID,YAAWC,MAAK,KAAK,UAAU,UAAU,CAAC,EAAG,QAAOA,MAAK,KAAK,UAAU,UAAU;AACtF,QAAM,eAAeA,MAAK,KAAK,MAAM,UAAU,UAAU;AACzD,MAAID,YAAW,YAAY,EAAG,QAAO;AACrC,SAAOC,MAAK,KAAK,UAAU,UAAU;AACvC;AAUA,SAAS,uBACP,qBACuB;AACvB,MAAI,uBAAuB,KAAM,QAAO,CAAC;AACzC,MAAI,OAAO,wBAAwB,SAAU,QAAO,EAAE,YAAY,oBAAoB;AACtF,SAAO;AACT;AAEA,SAASC,wBAAuB,GAAY,SAAwB;AAClE,MAAI,aAAa,MAAO,QAAO,IAAI,MAAM,GAAG,OAAO,KAAK,EAAE,OAAO,IAAI,EAAE,OAAO,EAAE,CAAC;AACjF,SAAO,IAAI,MAAM,GAAG,OAAO,KAAK,OAAO,CAAC,CAAC,EAAE;AAC7C;AA4BA,eAAsB,eACpB,qBAC0D;AAC1D,MAAI;AACF,UAAM,UAAU,uBAAuB,mBAAmB;AAC1D,UAAM,aAAa,QAAQ,cAAc,yBAAyB;AAClE,UAAM,aAAa,cAAc,UAAU;AAC3C,QAAI,cAAc,MAAM;AACtB,YAAM,IAA
I,MAAM,oBAAoB,UAAU,wDAAwD;AAAA,IACxG;AACA,WAAO,MAAM,oCAAoC;AAAA,MAC/C;AAAA,MACA,qBAAqB,QAAQ,wBAAwB;AAAA,MACrD,KAAK,QAAQ,IAAI;AAAA,IACnB,CAAC;AAAA,EACH,SAAS,GAAG;AACV,QAAI,aAAa,SAAS,EAAE,QAAQ,SAAS,eAAe,EAAG,OAAM;AACrE,UAAMC,wBAAuB,GAAG,kCAAkC;AAAA,EACpE;AACF;;;AC6DO,IAAM,2BAA2B;","names":["model","apiKey","readFileSync","existsSync","existsSync","readFileSync","name","normalizeAgentLlMError","join","existsSync","existsSync","join","normalizeAgentLlMError","normalizeAgentLlMError"]}
package/dist/cli.js CHANGED
@@ -2,7 +2,7 @@
2
2
  import {
3
3
  createChatModelFromLlmConfig,
4
4
  loadLlmConfig
5
- } from "./chunk-UZOGGJK7.js";
5
+ } from "./chunk-QKN7FTFV.js";
6
6
 
7
7
  // src/cli.ts
8
8
  import { join } from "path";
package/dist/createAgentLlM.d.ts CHANGED
@@ -2,19 +2,14 @@ import { createChatModelFromLlmConfig } from "./llmAdapter.js";
2
2
  export interface CreateAgentLlMOptions {
3
3
  /** Path to YAML config file. If omitted, uses llm.yaml in cwd or config/llm.yaml in cwd/parent. */
4
4
  configPath?: string;
5
- }
6
- export interface CreateAgentLlMAsyncOptions extends CreateAgentLlMOptions {
7
5
  /** If true (default), run npm install when provider is npm:<package> and package is not found. */
8
6
  installNpmIfMissing?: boolean;
9
7
  }
10
8
  /**
11
- * Create a LangChain-formatted LLM from config.
12
- * Pass configPath to use a specific YAML file; otherwise uses llm.yaml (cwd) or config/llm.yaml (cwd/parent).
13
- */
14
- export declare function createAgentLlM(options?: CreateAgentLlMOptions): import("@langchain/core/language_models/chat_models").BaseChatModel<import("@langchain/core/language_models/chat_models").BaseChatModelCallOptions, import("@langchain/core/messages").AIMessageChunk>;
15
- /**
16
- * Create a LangChain-formatted LLM from config, resolving npm: providers and installing packages if missing.
17
- * Use when your config has provider: "npm:wallee-llm" or similar.
9
+ * Create a LangChain-formatted LLM from config. Resolves npm: providers and optionally installs packages.
10
+ * - createAgentLlM() uses llm.yaml (cwd) or config/llm.yaml
11
+ * - createAgentLlM("path/to/llm.yaml") — use specific config file
12
+ * - createAgentLlM({ configPath: "...", installNpmIfMissing: true }) — options object
18
13
  */
19
- export declare function createAgentLlMAsync(options?: CreateAgentLlMAsyncOptions): Promise<ReturnType<typeof createChatModelFromLlmConfig>>;
14
+ export declare function createAgentLlM(configPathOrOptions?: string | CreateAgentLlMOptions): Promise<ReturnType<typeof createChatModelFromLlmConfig>>;
20
15
  //# sourceMappingURL=createAgentLlM.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"createAgentLlM.d.ts","sourceRoot":"","sources":["../src/createAgentLlM.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,4BAA4B,EAAE,MAAM,iBAAiB,CAAC;AAI/D,MAAM,WAAW,qBAAqB;IACpC,mGAAmG;IACnG,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,0BAA2B,SAAQ,qBAAqB;IACvE,kGAAkG;IAClG,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC/B;AAWD;;;GAGG;AACH,wBAAgB,cAAc,CAAC,OAAO,GAAE,qBAA0B,0MAOjE;AAED;;;GAGG;AACH,wBAAsB,mBAAmB,CACvC,OAAO,GAAE,0BAA+B,GACvC,OAAO,CAAC,UAAU,CAAC,OAAO,4BAA4B,CAAC,CAAC,CAW1D"}
1
+ {"version":3,"file":"createAgentLlM.d.ts","sourceRoot":"","sources":["../src/createAgentLlM.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,4BAA4B,EAAE,MAAM,iBAAiB,CAAC;AAI/D,MAAM,WAAW,qBAAqB;IACpC,mGAAmG;IACnG,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,kGAAkG;IAClG,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC/B;AAoDD;;;;;GAKG;AACH,wBAAsB,cAAc,CAClC,mBAAmB,CAAC,EAAE,MAAM,GAAG,qBAAqB,GACnD,OAAO,CAAC,UAAU,CAAC,OAAO,4BAA4B,CAAC,CAAC,CAiB1D"}
package/dist/index.d.ts CHANGED
@@ -15,8 +15,9 @@ export { createChatModelFromLlmConfig } from "./llmAdapter.js";
15
15
  export type { CreateChatModelFromLlmConfigOptions } from "./llmAdapter.js";
16
16
  export { NPM_PROTOCOL_PREFIX, parseNpmProviderSpec, isNpmProviderSpec, ensureNpmPackageInstalled, resolveNpmProvider, resolveLlmSectionWithNpm, createChatModelFromLlmConfigWithNpm, } from "./npmProviderProtocol.js";
17
17
  export type { EnsureNpmPackageInstalledOptions, ResolveNpmProviderOptions, ResolveLlmSectionWithNpmOptions, CreateChatModelFromLlmConfigWithNpmOptions, } from "./npmProviderProtocol.js";
18
- export { createAgentLlM, createAgentLlMAsync } from "./createAgentLlM.js";
19
- export type { CreateAgentLlMOptions, CreateAgentLlMAsyncOptions } from "./createAgentLlM.js";
20
- export { loadLLMExtensions, resolveLLMExtensionPackages } from "./loadLLMExtensions.js";
21
- export type { LLMType, LLMConfig, AgentConfigLlmSection, ChatMessage, ChatResult, ImageResult, ToolDefinition, ChatWithToolsMessage, ChatWithToolsResult, ILLMClient, ILLMRegistry, } from "./types.js";
18
+ export { createAgentLlM } from "./createAgentLlM.js";
19
+ export type { CreateAgentLlMOptions } from "./createAgentLlM.js";
20
+ export { loadLLMExtensions, resolveLLMExtensionPackages, discoverLLMExtensions, loadDiscoveredExtensions } from "./loadLLMExtensions.js";
21
+ export type { LLMType, LLMConfig, AgentConfigLlmSection, ChatMessage, ChatResult, ImageResult, ToolDefinition, ChatWithToolsMessage, ChatWithToolsResult, ILLMClient, ILLMRegistry, ILLMExtension, } from "./types.js";
22
+ export { AGENT_LLM_PROVIDER_FIELD } from "./types.js";
22
23
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,eAAe,EAAE,MAAM,aAAa,CAAC;AAC9C,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAChF,YAAY,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAC/D,OAAO,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AACjD,YAAY,EAAE,wBAAwB,EAAE,MAAM,cAAc,CAAC;AAC7D,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACtE,OAAO,EAAE,yBAAyB,EAAE,mBAAmB,EAAE,MAAM,wBAAwB,CAAC;AACxF,OAAO,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,uBAAuB,CAAC;AAC5G,OAAO,EAAE,4BAA4B,EAAE,MAAM,iBAAiB,CAAC;AAC/D,YAAY,EAAE,mCAAmC,EAAE,MAAM,iBAAiB,CAAC;AAC3E,OAAO,EACL,mBAAmB,EACnB,oBAAoB,EACpB,iBAAiB,EACjB,yBAAyB,EACzB,kBAAkB,EAClB,wBAAwB,EACxB,mCAAmC,GACpC,MAAM,0BAA0B,CAAC;AAClC,YAAY,EACV,gCAAgC,EAChC,yBAAyB,EACzB,+BAA+B,EAC/B,0CAA0C,GAC3C,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,cAAc,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AAC1E,YAAY,EAAE,qBAAqB,EAAE,0BAA0B,EAAE,MAAM,qBAAqB,CAAC;AAC7F,OAAO,EAAE,iBAAiB,EAAE,2BAA2B,EAAE,MAAM,wBAAwB,CAAC;AAExF,YAAY,EACV,OAAO,EACP,SAAS,EACT,qBAAqB,EACrB,WAAW,EACX,UAAU,EACV,WAAW,EACX,cAAc,EACd,oBAAoB,EACpB,mBAAmB,EACnB,UAAU,EACV,YAAY,GACb,MAAM,YAAY,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,eAAe,EAAE,MAAM,aAAa,CAAC;AAC9C,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAChF,YAAY,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAC/D,OAAO,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AACjD,YAAY,EAAE,wBAAwB,EAAE,MAAM,cAAc,CAAC;AAC7D,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACtE,OAAO,EAAE,yBAAyB,EAAE,mBAAmB,EAAE,MAAM,wBAAwB,CAAC;AACxF,OAAO,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,uBAAuB,CAAC;AAC5G,OAAO,EAAE,4BAA4B,EAAE,MAAM,iBAAiB,CAAC;AAC/D,YAAY,EAAE,mCAAmC,EAAE,MAAM,iBAAiB,CAAC;AAC3E,OAAO,EACL,mBAAmB,EACnB,oBAAoB,EACpB,iBAAiB,EACjB,yBAAyB,EACzB,kBAAkB,EAClB,wBAAwB,EACxB,mCAAmC,GACpC,MAAM,0BAA0B,CAAC;AAClC,YAAY,EACV,gCAAgC,EAChC,yBAAyB,EACzB,+BAA+B,EAC/B,0CAA0C,GAC3C,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,YAAY,EAAE,qBAAqB,EAAE,MAAM,qBAAqB,CAAC;AACjE,OAAO,EAAE,iBAAiB,EAAE,2BAA2B,EAAE,qBAAqB,EAAE,wBAAwB,EAAE,MAAM,wBAAwB,CAAC;AAEzI,YAAY,EACV,OAAO,EACP,SAAS,EACT,qBAAqB,EACrB,WAAW,EACX,UAAU,EACV,WAAW,EACX,cAAc,EACd,oBAAoB,EACpB,mBAAmB,EACnB,UAAU,EACV,YAAY,EACZ,aAAa,GACd,MAAM,YAAY,CAAC;AACpB,OAAO,EAAE,wBAAwB,EAAE,MAAM,YAAY,CAAC"}
package/dist/index.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import {
2
+ AGENT_LLM_PROVIDER_FIELD,
2
3
  NPM_PROTOCOL_PREFIX,
3
4
  createAgentLlM,
4
- createAgentLlMAsync,
5
5
  createChatModelFromLlmConfig,
6
6
  createChatModelFromLlmConfigWithNpm,
7
7
  createClient,
@@ -9,9 +9,11 @@ import {
9
9
  createOpenAIChatClient,
10
10
  createOpenAIClient,
11
11
  createOpenAIImageClient,
12
+ discoverLLMExtensions,
12
13
  ensureNpmPackageInstalled,
13
14
  getChatModelFactory,
14
15
  isNpmProviderSpec,
16
+ loadDiscoveredExtensions,
15
17
  loadLLMExtensions,
16
18
  loadLlmConfig,
17
19
  parseLlmSection,
@@ -23,11 +25,11 @@ import {
23
25
  resolveLlmSectionWithNpm,
24
26
  resolveNpmProvider,
25
27
  substituteEnv
26
- } from "./chunk-UZOGGJK7.js";
28
+ } from "./chunk-QKN7FTFV.js";
27
29
  export {
30
+ AGENT_LLM_PROVIDER_FIELD,
28
31
  NPM_PROTOCOL_PREFIX,
29
32
  createAgentLlM,
30
- createAgentLlMAsync,
31
33
  createChatModelFromLlmConfig,
32
34
  createChatModelFromLlmConfigWithNpm,
33
35
  createClient,
@@ -35,9 +37,11 @@ export {
35
37
  createOpenAIChatClient,
36
38
  createOpenAIClient,
37
39
  createOpenAIImageClient,
40
+ discoverLLMExtensions,
38
41
  ensureNpmPackageInstalled,
39
42
  getChatModelFactory,
40
43
  isNpmProviderSpec,
44
+ loadDiscoveredExtensions,
41
45
  loadLLMExtensions,
42
46
  loadLlmConfig,
43
47
  parseLlmSection,
package/dist/llmAdapter.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"llmAdapter.d.ts","sourceRoot":"","sources":["../src/llmAdapter.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6CAA6C,CAAC;AAMjF,MAAM,WAAW,mCAAmC;IAClD,qGAAqG;IACrG,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,8BAA8B;IAC9B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gCAAgC;IAChC,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;;GAGG;AACH,wBAAgB,4BAA4B,CAC1C,OAAO,EAAE,mCAAmC,GAC3C,aAAa,CA2Df"}
1
+ {"version":3,"file":"llmAdapter.d.ts","sourceRoot":"","sources":["../src/llmAdapter.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6CAA6C,CAAC;AAMjF,MAAM,WAAW,mCAAmC;IAClD,qGAAqG;IACrG,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,8BAA8B;IAC9B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gCAAgC;IAChC,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAOD;;;GAGG;AACH,wBAAgB,4BAA4B,CAC1C,OAAO,EAAE,mCAAmC,GAC3C,aAAa,CA+Ef"}
package/dist/loadLLMExtensions.d.ts CHANGED
@@ -1,18 +1,23 @@
1
1
  /**
2
- * Load optional LLM extensions by npm package name (e.g. wallee-llm).
3
- * Call before createChatModelFromLlmConfig when using extension providers.
4
- * Config llm.type = npm package name(s); we dynamic load those packages. No extensions field.
2
+ * Load LLM extensions by npm package name or by dynamic discovery.
3
+ * Extensions implement ILLMExtension (getLLMExtension()). Framework does registration so extension need not call register.
5
4
  */
6
5
  /**
7
6
  * Resolve llm.type to a list of npm package names to load.
8
- * type is the npm package name or array of package names; we load them directly (no mapping).
9
7
  */
10
8
  export declare function resolveLLMExtensionPackages(types?: string | string[]): string[];
9
+ export declare function discoverLLMExtensions(cwd?: string): string[];
11
10
  /**
12
- * Dynamically load LLM extensions by npm package name.
13
- * Each package must export registerLLMExtension() and will register its provider(s) and ChatModel factory.
11
+ * Load LLM extensions by npm package name.
12
+ * Prefers ILLMExtension (getLLMExtension() or default export); falls back to registerLLMExtension().
14
13
  * Safe to call multiple times; each package is loaded at most once.
15
- * @param extensionPackages npm package names; default ["wallee-llm"] when omitted
14
+ * @param extensionPackages npm package names; when omitted, loads none
16
15
  */
17
16
  export declare function loadLLMExtensions(extensionPackages?: string[]): Promise<void>;
17
+ /**
18
+ * Discover provider packages in node_modules and load them.
19
+ * Equivalent to loadLLMExtensions(discoverLLMExtensions(cwd)).
20
+ * @param cwd Directory containing node_modules; default process.cwd()
21
+ */
22
+ export declare function loadDiscoveredExtensions(cwd?: string): Promise<string[]>;
18
23
  //# sourceMappingURL=loadLLMExtensions.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"loadLLMExtensions.d.ts","sourceRoot":"","sources":["../src/loadLLMExtensions.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAMH;;;GAGG;AACH,wBAAgB,2BAA2B,CAAC,KAAK,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,MAAM,EAAE,CAM/E;AAED;;;;;GAKG;AACH,wBAAsB,iBAAiB,CACrC,iBAAiB,CAAC,EAAE,MAAM,EAAE,GAC3B,OAAO,CAAC,IAAI,CAAC,CAiBf"}
1
+ {"version":3,"file":"loadLLMExtensions.d.ts","sourceRoot":"","sources":["../src/loadLLMExtensions.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAyCH;;GAEG;AACH,wBAAgB,2BAA2B,CAAC,KAAK,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,MAAM,EAAE,CAM/E;AAsBD,wBAAgB,qBAAqB,CAAC,GAAG,GAAE,MAAsB,GAAG,MAAM,EAAE,CAiC3E;AAED;;;;;GAKG;AACH,wBAAsB,iBAAiB,CACrC,iBAAiB,CAAC,EAAE,MAAM,EAAE,GAC3B,OAAO,CAAC,IAAI,CAAC,CAyBf;AAED;;;;GAIG;AACH,wBAAsB,wBAAwB,CAAC,GAAG,GAAE,MAAsB,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAK7F"}
package/dist/loadLlmConfig.d.ts CHANGED
@@ -19,6 +19,7 @@ export declare function parseLlmYaml(content: string, options?: LoadLlmConfigOpt
19
19
  * Load LLM config from a YAML file (e.g. config/llm.yaml).
20
20
  * Returns the llm section for use with createChatModelFromLlmConfig or parseLlmSection.
21
21
  * Returns null if file does not exist or has no llm key.
22
+ * Throws with clear message if file exists but read or parse fails.
22
23
  */
23
24
  export declare function loadLlmConfig(filePath: string, options?: LoadLlmConfigOptions): unknown | null;
24
25
  //# sourceMappingURL=loadLlmConfig.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"loadLlmConfig.d.ts","sourceRoot":"","sources":["../src/loadLlmConfig.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,MAAM,WAAW,oBAAoB;IACnC,yDAAyD;IACzD,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,GAAG,EAAE,OAAO,GAAG,OAAO,CAanD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,OAAO,EAAE,MAAM,EACf,OAAO,GAAE,oBAAyB,GACjC,OAAO,CAMT;AAED;;;;GAIG;AACH,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,MAAM,EAChB,OAAO,GAAE,oBAAyB,GACjC,OAAO,GAAG,IAAI,CAShB"}
1
+ {"version":3,"file":"loadLlmConfig.d.ts","sourceRoot":"","sources":["../src/loadLlmConfig.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,MAAM,WAAW,oBAAoB;IACnC,yDAAyD;IACzD,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,GAAG,EAAE,OAAO,GAAG,OAAO,CAanD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,OAAO,EAAE,MAAM,EACf,OAAO,GAAE,oBAAyB,GACjC,OAAO,CAMT;AAED;;;;;GAKG;AACH,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,MAAM,EAChB,OAAO,GAAE,oBAAyB,GACjC,OAAO,GAAG,IAAI,CAmBhB"}
package/dist/npmProviderProtocol.d.ts CHANGED
@@ -33,15 +33,6 @@ export interface ResolveNpmProviderOptions {
33
33
  /** Working directory for npm install. Default: process.cwd() */
34
34
  cwd?: string;
35
35
  }
36
- /**
37
- * Resolve an npm provider spec to the concrete provider name.
38
- * - npm:wallee-llm → load wallee-llm, use its default provider (e.g. cis)
39
- * - npm:wallee-llm@0.1.0 → load wallee-llm@0.1.0, use its default provider
40
- * - npm:wallee-llm#cis → load wallee-llm, use provider "cis"
41
- * - npm:wallee-llm@0.1.0#cis → load wallee-llm@0.1.0, use provider "cis"
42
- * When installNpmIfMissing is true, installs the package (with optional version) if not found.
43
- * Returns the provider name to use, or null if spec is not npm: protocol.
44
- */
45
36
  export declare function resolveNpmProvider(spec: string, options?: ResolveNpmProviderOptions): Promise<string | null>;
46
37
  export interface ResolveLlmSectionWithNpmOptions extends ResolveNpmProviderOptions {
47
38
  }
@@ -1 +1 @@
1
- {"version":3,"file":"npmProviderProtocol.d.ts","sourceRoot":"","sources":["../src/npmProviderProtocol.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,mCAAmC,EAAE,MAAM,iBAAiB,CAAC;AAC3E,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6CAA6C,CAAC;AAEjF,eAAO,MAAM,mBAAmB,SAAS,CAAC;AAE1C,4FAA4F;AAC5F,wBAAgB,oBAAoB,CAAC,IAAI,EAAE,MAAM,GAAG;IAAE,WAAW,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,CAYtH;AAED,MAAM,WAAW,gCAAgC;IAC/C,6FAA6F;IAC7F,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gEAAgE;IAChE,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,MAAM,CAE/D;AAYD;;;GAGG;AACH,wBAAsB,yBAAyB,CAC7C,WAAW,EAAE,MAAM,EACnB,OAAO,GAAE,gCAAqC,GAC7C,OAAO,CAAC,IAAI,CAAC,CAef;AAED,MAAM,WAAW,yBAAyB;IACxC,kFAAkF;IAClF,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,gEAAgE;IAChE,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;;;;;;;GAQG;AACH,wBAAsB,kBAAkB,CACtC,IAAI,EAAE,MAAM,EACZ,OAAO,GAAE,yBAA8B,GACtC,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAsCxB;AAED,MAAM,WAAW,+BAAgC,SAAQ,yBAAyB;CAAG;AAErF;;;GAGG;AACH,wBAAsB,wBAAwB,CAC5C,UAAU,EAAE,OAAO,EACnB,OAAO,GAAE,+BAAoC,GAC5C,OAAO,CAAC,OAAO,CAAC,CAsBlB;AAED,MAAM,WAAW,0CACf,SAAQ,mCAAmC,EACzC,yBAAyB;CAAG;AAEhC;;;;GAIG;AACH,wBAAsB,mCAAmC,CACvD,OAAO,EAAE,0CAA0C,GAClD,OAAO,CAAC,aAAa,CAAC,CAUxB"}
1
+ {"version":3,"file":"npmProviderProtocol.d.ts","sourceRoot":"","sources":["../src/npmProviderProtocol.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,mCAAmC,EAAE,MAAM,iBAAiB,CAAC;AAC3E,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6CAA6C,CAAC;AAEjF,eAAO,MAAM,mBAAmB,SAAS,CAAC;AAE1C,4FAA4F;AAC5F,wBAAgB,oBAAoB,CAAC,IAAI,EAAE,MAAM,GAAG;IAAE,WAAW,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,CAYtH;AAED,MAAM,WAAW,gCAAgC;IAC/C,6FAA6F;IAC7F,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gEAAgE;IAChE,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,MAAM,CAE/D;AAYD;;;GAGG;AACH,wBAAsB,yBAAyB,CAC7C,WAAW,EAAE,MAAM,EACnB,OAAO,GAAE,gCAAqC,GAC7C,OAAO,CAAC,IAAI,CAAC,CAuBf;AAED,MAAM,WAAW,yBAAyB;IACxC,kFAAkF;IAClF,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,gEAAgE;IAChE,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAkBD,wBAAsB,kBAAkB,CACtC,IAAI,EAAE,MAAM,EACZ,OAAO,GAAE,yBAA8B,GACtC,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CA2CxB;AAED,MAAM,WAAW,+BAAgC,SAAQ,yBAAyB;CAAG;AAErF;;;GAGG;AACH,wBAAsB,wBAAwB,CAC5C,UAAU,EAAE,OAAO,EACnB,OAAO,GAAE,+BAAoC,GAC5C,OAAO,CAAC,OAAO,CAAC,CAkClB;AAED,MAAM,WAAW,0CACf,SAAQ,mCAAmC,EACzC,yBAAyB;CAAG;AAEhC;;;;GAIG;AACH,wBAAsB,mCAAmC,CACvD,OAAO,EAAE,0CAA0C,GAClD,OAAO,CAAC,aAAa,CAAC,CAcxB"}
package/dist/types.d.ts CHANGED
@@ -137,4 +137,21 @@ export interface ILLMRegistry {
137
137
  defaultId(): string | undefined;
138
138
  ids(): string[];
139
139
  }
140
+ /**
141
+ * Generic interface for an LLM provider extension (npm package).
142
+ * Implement getLLMExtension() returning this; framework will register the provider when loading.
143
+ * Prefer createClient + createChatModel so the framework does registration (extension need not call register).
144
+ */
145
+ export interface ILLMExtension {
146
+ /** Provider name (e.g. "cis"). Framework registers under this name. */
147
+ readonly providerName: string;
148
+ /** ILLMClient factory. If present with createChatModel, framework calls registerProvider(providerName, createClient). */
149
+ readonly createClient?: (config: LLMConfig) => ILLMClient;
150
+ /** LangChain ChatModel factory. If present with createClient, framework calls registerChatModelProvider(providerName, createChatModel). */
151
+ readonly createChatModel?: (config: LLMConfig) => unknown;
152
+ /** Legacy: if extension provides register(), framework calls it. Prefer createClient + createChatModel so framework registers. */
153
+ register?(): void;
154
+ }
155
+ /** package.json field: when true, the package is an agent-llm provider (for discovery). */
156
+ export declare const AGENT_LLM_PROVIDER_FIELD = "agentLlmProvider";
140
157
  //# sourceMappingURL=types.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,yFAAyF;AACzF,MAAM,MAAM,OAAO,GAAG,MAAM,GAAG,OAAO,CAAC;AAEvC,qEAAqE;AACrE,MAAM,WAAW,SAAS;IACxB,4DAA4D;IAC5D,EAAE,EAAE,MAAM,CAAC;IACX,mBAAmB;IACnB,IAAI,EAAE,OAAO,CAAC;IACd,yEAAyE;IACzE,QAAQ,EAAE,MAAM,CAAC;IACjB,6CAA6C;IAC7C,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,+CAA+C;IAC/C,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,uCAAuC;IACvC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,iGAAiG;IACjG,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,qCAAqC;IACrC,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACxB;AAED,mGAAmG;AACnG,MAAM,WAAW,qBAAqB;IACpC,qCAAqC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,sFAAsF;IACtF,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;IACzB,2BAA2B;IAC3B,SAAS,CAAC,EAAE,SAAS,EAAE,CAAC;IACxB,0EAA0E;IAC1E,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,sFAAsF;IACtF,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACxB;AAED,0BAA0B;AAC1B,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,0BAA0B;AAC1B,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,gBAAgB,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;CAC9D;AAED,mDAAmD;AACnD,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB,CAAC;CACH;AAED,qFAAqF;AACrF,MAAM,MAAM,oBAAoB,GAC5B,WAAW,GACX;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,YAAY,EAAE,MAAM,CAAA;CAAE,GACvD;IACE,IAAI,EAAE,WAAW,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,UAAU,CAAC,EAAE,KAAK,CAAC;QACjB,EAAE,EAAE,MAAM,CAAC;QACX,IAAI,EAAE,UAAU,CAAC;QACjB,QAAQ,EAAE;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,SAAS,EAAE,MAAM,CAAA;SAAE,CAAC;KAC/C,CAAC,CAAC;CACJ,CAAC;AAEN,kCAAkC;AAClC,MAAM,WAAW,mBAAmB;IAClC,OAAO,EAAE;QACP,IAAI,EAAE,WAAW,CAAC;QAClB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;QACxB,UAAU,CAAC,EAAE,KAAK,CAAC;YACjB,EAAE,EAAE,MAAM,CAAC;YACX,IAAI,EAAE,UAAU,CAAC;YACjB,QAAQ,EAAE;gBAAE,IAAI,EAAE,MAAM,CAAC;gBAAC,SAAS,EAAE,MAAM,CAAA;aAAE,CAAC;SAC/C,CAAC,CAAC;KACJ,CAAC;IACF,KAAK,CAAC,EAAE;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,gBAAgB,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;CAC9D;AAED,8BAA8B;AAC9B,MAAM,WAAW,WAAW;IAC1B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,UAAU;IACzB,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;IACvB,sCAAsC;IACtC,IAAI,CAAC,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;IACnD;;;OAGG;IACH,aAAa,CAAC,CACZ,QAAQ,EAAE,oBAAoB,EAAE,EAChC,KAAK,EAAE,cAAc,EAAE,EACvB,OAAO,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAC/B,OAAO,CAAC,mBAAmB,CAAC,CAAC;IAChC,4EAA4E;IAC5E,aAAa,CAAC,CAAC,OAAO,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,CAAC,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;CAC9F;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,CAAC,EAAE,EAAE,MAAM,GAAG,UAAU,GAAG,SAAS,CAAC;IACxC,SAAS,IAAI,MAAM,GAAG,SAAS,CAAC;IAChC,GAAG,IAAI,MAAM,EAAE,CAAC;CACjB"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,yFAAyF;AACzF,MAAM,MAAM,OAAO,GAAG,MAAM,GAAG,OAAO,CAAC;AAEvC,qEAAqE;AACrE,MAAM,WAAW,SAAS;IACxB,4DAA4D;IAC5D,EAAE,EAAE,MAAM,CAAC;IACX,mBAAmB;IACnB,IAAI,EAAE,OAAO,CAAC;IACd,yEAAyE;IACzE,QAAQ,EAAE,MAAM,CAAC;IACjB,6CAA6C;IAC7C,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,+CAA+C;IAC/C,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,uCAAuC;IACvC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,iGAAiG;IACjG,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,qCAAqC;IACrC,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACxB;AAED,mGAAmG;AACnG,MAAM,WAAW,qBAAqB;IACpC,qCAAqC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,sFAAsF;IACtF,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;IACzB,2BAA2B;IAC3B,SAAS,CAAC,EAAE,SAAS,EAAE,CAAC;IACxB,0EAA0E;IAC1E,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,sFAAsF;IACtF,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACxB;AAED,0BAA0B;AAC1B,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,0BAA0B;AAC1B,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,gBAAgB,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;CAC9D;AAED,mDAAmD;AACnD,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB,CAAC;CACH;AAED,qFAAqF;AACrF,MAAM,MAAM,oBAAoB,GAC5B,WAAW,GACX;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,YAAY,EAAE,MAAM,CAAA;CAAE,GACvD;IACE,IAAI,EAAE,WAAW,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,UAAU,CAAC,EAAE,KAAK,CAAC;QACjB,EAAE,EAAE,MAAM,CAAC;QACX,IAAI,EAAE,UAAU,CAAC;QACjB,QAAQ,EAAE;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,SAAS,EAAE,MAAM,CAAA;SAAE,CAAC;KAC/C,CAAC,CAAC;CACJ,CAAC;AAEN,kCAAkC;AAClC,MAAM,WAAW,mBAAmB;IAClC,OAAO,EAAE;QACP,IAAI,EAAE,WAAW,CAAC;QAClB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;QACxB,UAAU,CAAC,EAAE,KAAK,CAAC;YACjB,EAAE,EAAE,MAAM,CAAC;YACX,IAAI,EAAE,UAAU,CAAC;YACjB,QAAQ,EAAE;gBAAE,IAAI,EAAE,MAAM,CAAC;gBAAC,SAAS,EAAE,MAAM,CAAA;aAAE,CAAC;SAC/C,CAAC,CAAC;KACJ,CAAC;IACF,KAAK,CAAC,EAAE;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,gBAAgB,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;CAC9D;AAED,8BAA8B;AAC9B,MAAM,WAAW,WAAW;IAC1B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,UAAU;IACzB,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;IACvB,sCAAsC;IACtC,IAAI,CAAC,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;IACnD;;;OAGG;IACH,aAAa,CAAC,CACZ,QAAQ,EAAE,oBAAoB,EAAE,EAChC,KAAK,EAAE,cAAc,EAAE,EACvB,OAAO,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAC/B,OAAO,CAAC,mBAAmB,CAAC,CAAC;IAChC,4EAA4E;IAC5E,aAAa,CAAC,CAAC,OAAO,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,CAAC,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;CAC9F;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,CAAC,EAAE,EAAE,MAAM,GAAG,UAAU,GAAG,SAAS,CAAC;IACxC,SAAS,IAAI,MAAM,GAAG,SAAS,CAAC;IAChC,GAAG,IAAI,MAAM,EAAE,CAAC;CACjB;AAED;;;;GAIG;AACH,MAAM,WAAW,aAAa;IAC5B,uEAAuE;IACvE,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC;IAC9B,yHAAyH;IACzH,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,KAAK,UAAU,CAAC;IAC1D,2IAA2I;IAC3I,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,SAAS,KAAK,OAAO,CAAC;IAC1D,kIAAkI;IAClI,QAAQ,CAAC,IAAI,IAAI,CAAC;CACnB;AAED,2FAA2F;AAC3F,eAAO,MAAM,wBAAwB,qBAAqB,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@easynet/agent-llm",
3
- "version": "1.0.4",
3
+ "version": "1.0.6",
4
4
  "description": "Agent LLM: multi-provider, multi-model, simple chat/image API. Consumes agent.yaml llm section.",
5
5
  "type": "module",
6
6
  "bin": {
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/config.ts","../src/loadLlmConfig.ts","../src/providers/openai.ts","../src/providers/index.ts","../src/factory.ts","../src/chatModelRegistry.ts","../src/llmAdapter.ts","../src/npmProviderProtocol.ts","../src/loadLLMExtensions.ts","../src/createAgentLlM.ts"],"sourcesContent":["/**\n * Parse agent.yaml llm section into normalized LLMConfig[] and default id.\n * Supports: flat (each model keyed by name), instances[], or single object.\n */\n\nimport type { LLMConfig } from \"./types.js\";\n\nconst DEFAULT_LLM_ID = \"default\";\n\nconst RESERVED_KEYS = new Set([\n \"default\",\n \"instances\",\n \"catalog\",\n \"provider\",\n \"model\",\n \"temperature\",\n \"apiKey\",\n \"baseURL\",\n \"base_url\",\n \"type\",\n \"id\",\n]);\n\n/**\n * Parse llm section: flat (each model keyed by name), default+instances, or single object.\n */\nexport function parseLlmSection(section: unknown): { defaultId: string; configs: LLMConfig[] } {\n if (section == null || typeof section !== \"object\") {\n return { defaultId: DEFAULT_LLM_ID, configs: [] };\n }\n\n if (Array.isArray(section)) {\n const configs = section\n .filter((i): i is Record<string, unknown> => i != null && typeof i === \"object\")\n .map((item, i) => normalizeLlmConfig({ ...item, id: item.id ?? item.name ?? String(i) }))\n .filter((c): c is LLMConfig => c != null);\n const defaultId = configs.length > 0 ? configs[0]!.id : DEFAULT_LLM_ID;\n return { defaultId, configs };\n }\n\n const s = section as Record<string, unknown>;\n\n const flatEntries = Object.entries(s).filter(\n ([k, v]) => !RESERVED_KEYS.has(k) && v != null && typeof v === \"object\" && !Array.isArray(v)\n );\n if (flatEntries.length > 0) {\n const configs: LLMConfig[] = [];\n for (const [id, entry] of flatEntries) {\n const c = entryToLlmConfig(id, entry as Record<string, unknown>);\n if (c) configs.push(c);\n }\n const defaultId =\n typeof s.default === \"string\" && s.default && flatEntries.some(([k]) => k === s.default)\n ? s.default\n : configs.length > 0\n ? configs[0]!.id\n : DEFAULT_LLM_ID;\n return { defaultId, configs };\n }\n\n if (Array.isArray(s.instances)) {\n const configs = (s.instances as unknown[])\n .filter((i): i is Record<string, unknown> => i != null && typeof i === \"object\")\n .map((i) => normalizeLlmConfig(i))\n .filter((c): c is LLMConfig => c != null);\n const defaultId =\n typeof s.default === \"string\" && s.default\n ? s.default\n : configs.length > 0\n ? configs[0]!.id\n : DEFAULT_LLM_ID;\n return { defaultId, configs };\n }\n\n if (typeof s.provider === \"string\" || typeof s.model === \"string\" || typeof (s as { name?: string }).name === \"string\") {\n const one = singleObjectToLlmConfig(s);\n return { defaultId: one.id, configs: [one] };\n }\n\n return { defaultId: DEFAULT_LLM_ID, configs: [] };\n}\n\nconst EXTENSION_OPTION_KEYS = [\"featureKey\", \"tenant\", \"authToken\", \"verifySSL\", \"bypassAuth\", \"host\", \"resolveHost\", \"timeoutMs\", \"options\"];\n\nfunction entryToLlmConfig(id: string, entry: Record<string, unknown>): LLMConfig | null {\n const opts = entry.options as Record<string, unknown> | undefined;\n const baseURL =\n typeof entry.base_url === \"string\"\n ? entry.base_url\n : typeof entry.baseURL === \"string\"\n ? entry.baseURL\n : undefined;\n const model = typeof entry.name === \"string\" ? entry.name : typeof entry.model === \"string\" ? entry.model : undefined;\n const provider = typeof entry.provider === \"string\" && entry.provider ? 
entry.provider : \"openai\";\n const config: LLMConfig = {\n id,\n type: \"chat\",\n provider,\n model,\n temperature: typeof opts?.temperature === \"number\" ? opts.temperature : typeof entry.temperature === \"number\" ? entry.temperature : undefined,\n apiKey: typeof opts?.apiKey === \"string\" ? opts.apiKey : typeof entry.apiKey === \"string\" ? entry.apiKey : undefined,\n baseURL,\n };\n if (typeof entry.type === \"string\" && entry.type === \"image\") config.type = \"image\";\n if (opts && typeof opts === \"object\") (config as Record<string, unknown>).options = opts;\n for (const k of EXTENSION_OPTION_KEYS) {\n if (entry[k] !== undefined) (config as Record<string, unknown>)[k] = entry[k];\n else if (opts && opts[k] !== undefined) (config as Record<string, unknown>)[k] = opts[k];\n }\n return config;\n}\n\nfunction singleObjectToLlmConfig(s: Record<string, unknown>): LLMConfig {\n const one: LLMConfig = {\n id: DEFAULT_LLM_ID,\n type: \"chat\",\n provider: typeof s.provider === \"string\" ? s.provider : \"openai\",\n model: typeof s.model === \"string\" ? s.model : (typeof (s as { name?: string }).name === \"string\" ? (s as { name: string }).name : undefined),\n temperature: typeof s.temperature === \"number\" ? s.temperature : undefined,\n apiKey: typeof s.apiKey === \"string\" ? s.apiKey : undefined,\n baseURL:\n typeof s.baseURL === \"string\" ? s.baseURL : typeof s.base_url === \"string\" ? s.base_url : undefined,\n };\n Object.keys(s).forEach((k) => {\n if (![\"id\", \"type\", \"provider\", \"model\", \"name\", \"temperature\", \"apiKey\", \"baseURL\", \"base_url\", \"default\", \"instances\"].includes(k)) {\n (one as Record<string, unknown>)[k] = s[k];\n }\n });\n return one;\n}\n\nfunction normalizeLlmConfig(o: Record<string, unknown>): LLMConfig | null {\n const id = typeof o.id === \"string\" && o.id ? o.id : DEFAULT_LLM_ID;\n const type = o.type === \"image\" ? \"image\" : \"chat\";\n const provider = typeof o.provider === \"string\" && o.provider ? o.provider : \"openai\";\n const opts = o.options as Record<string, unknown> | undefined;\n const config: LLMConfig = {\n id,\n type,\n provider,\n model: typeof o.model === \"string\" ? o.model : (typeof o.name === \"string\" ? o.name : undefined),\n temperature:\n typeof o.temperature === \"number\"\n ? o.temperature\n : typeof opts?.temperature === \"number\"\n ? opts.temperature\n : undefined,\n apiKey:\n typeof o.apiKey === \"string\"\n ? o.apiKey\n : typeof opts?.apiKey === \"string\"\n ? opts.apiKey\n : undefined,\n baseURL: typeof o.baseURL === \"string\" ? o.baseURL : (typeof o.base_url === \"string\" ? o.base_url : undefined),\n };\n Object.keys(o).forEach((k) => {\n if (![\"id\", \"type\", \"provider\", \"model\", \"name\", \"temperature\", \"apiKey\", \"baseURL\", \"base_url\"].includes(k)) {\n (config as Record<string, unknown>)[k] = o[k];\n }\n });\n return config;\n}\n","/**\n * Load and parse LLM config from YAML (e.g. config/llm.yaml).\n * Supports ${VAR} substitution from process.env.\n */\n\nimport { readFileSync, existsSync } from \"node:fs\";\nimport { parse as parseYaml } from \"yaml\";\n\nexport interface LoadLlmConfigOptions {\n /** Replace ${VAR} with process.env.VAR. Default true. */\n substituteEnv?: boolean;\n}\n\n/**\n * Recursively replace ${VAR} in strings with process.env.VAR.\n */\nexport function substituteEnv(obj: unknown): unknown {\n if (obj === null || obj === undefined) return obj;\n if (typeof obj === \"string\") {\n const m = obj.match(/^\\$\\{(\\w+)\\}$/);\n return m ? 
(process.env[m[1]] ?? obj) : obj;\n }\n if (Array.isArray(obj)) return obj.map(substituteEnv);\n if (typeof obj === \"object\") {\n const out: Record<string, unknown> = {};\n for (const [k, v] of Object.entries(obj)) out[k] = substituteEnv(v);\n return out;\n }\n return obj;\n}\n\n/**\n * Parse YAML string and return the llm section (top-level key \"llm\").\n * Returns undefined if content has no llm key.\n */\nexport function parseLlmYaml(\n content: string,\n options: LoadLlmConfigOptions = {}\n): unknown {\n const { substituteEnv: doSub = true } = options;\n const parsed = parseYaml(content) as { llm?: unknown };\n const llm = parsed?.llm;\n if (llm == null) return undefined;\n return doSub ? substituteEnv(llm) : llm;\n}\n\n/**\n * Load LLM config from a YAML file (e.g. config/llm.yaml).\n * Returns the llm section for use with createChatModelFromLlmConfig or parseLlmSection.\n * Returns null if file does not exist or has no llm key.\n */\nexport function loadLlmConfig(\n filePath: string,\n options: LoadLlmConfigOptions = {}\n): unknown | null {\n if (!existsSync(filePath)) return null;\n try {\n const raw = readFileSync(filePath, \"utf8\");\n const llm = parseLlmYaml(raw, options);\n return llm ?? null;\n } catch {\n return null;\n }\n}\n","/**\n * OpenAI-compatible format: chat (/v1/chat/completions) and image.\n * Supports baseURL for Azure, local proxy, and other compatible endpoints.\n */\n\nimport OpenAI from \"openai\";\nimport type {\n LLMConfig,\n ChatMessage,\n ChatResult,\n ImageResult,\n ILLMClient,\n ChatWithToolsMessage,\n ChatWithToolsResult,\n ToolDefinition,\n} from \"../types.js\";\n\nfunction getApiKey(config: LLMConfig): string {\n const key = config.apiKey ?? process.env.OPENAI_API_KEY ?? \"\";\n if (!key) throw new Error(\"OpenAI-compatible apiKey required (config.apiKey or OPENAI_API_KEY)\");\n return key;\n}\n\nfunction createOpenAIClientOptions(config: LLMConfig): { apiKey: string; baseURL?: string } {\n const opts: { apiKey: string; baseURL?: string } = { apiKey: getApiKey(config) };\n if (typeof config.baseURL === \"string\" && config.baseURL) opts.baseURL = config.baseURL;\n return opts;\n}\n\nfunction serializeMessage(\n m: ChatWithToolsMessage\n): OpenAI.Chat.Completions.ChatCompletionMessageParam {\n if (m.role === \"tool\")\n return { role: \"tool\", content: m.content, tool_call_id: m.tool_call_id };\n if (m.role === \"assistant\" && \"tool_calls\" in m && m.tool_calls?.length) {\n return {\n role: \"assistant\",\n content: m.content ?? null,\n tool_calls: m.tool_calls.map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: { name: tc.function.name, arguments: tc.function.arguments },\n })),\n };\n }\n return { role: m.role, content: (m as ChatMessage).content };\n}\n\nexport function createOpenAIChatClient(config: LLMConfig): ILLMClient {\n const client = new OpenAI(createOpenAIClientOptions(config));\n const model = config.model ?? process.env.OPENAI_MODEL ?? \"gpt-4o-mini\";\n const temperature = config.temperature ?? 0;\n\n return {\n id: config.id,\n type: \"chat\",\n async chat(messages: ChatMessage[]): Promise<ChatResult> {\n const resp = await client.chat.completions.create({\n model,\n temperature,\n messages: messages.map((m) => ({ role: m.role, content: m.content })),\n });\n const content = resp.choices[0]?.message?.content ?? \"\";\n const usage = resp.usage\n ? 
{ promptTokens: resp.usage.prompt_tokens, completionTokens: resp.usage.completion_tokens }\n : undefined;\n return { content, usage };\n },\n async chatWithTools(\n messages: ChatWithToolsMessage[],\n tools: ToolDefinition[],\n _options?: { timeoutMs?: number }\n ): Promise<ChatWithToolsResult> {\n const resp = await client.chat.completions.create({\n model,\n temperature,\n messages: messages.map(serializeMessage),\n tools: tools.map((t) => ({\n type: \"function\" as const,\n function: {\n name: t.function.name,\n description: t.function.description,\n parameters: (t.function.parameters ?? undefined) as Record<string, unknown> | undefined,\n },\n })),\n });\n const msg = resp.choices[0]?.message;\n const usage = resp.usage\n ? { promptTokens: resp.usage.prompt_tokens, completionTokens: resp.usage.completion_tokens }\n : undefined;\n return {\n message: {\n role: \"assistant\",\n content: msg?.content ?? null,\n tool_calls: msg?.tool_calls?.map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: {\n name: tc.function?.name ?? \"\",\n arguments: tc.function?.arguments ?? \"\",\n },\n })),\n },\n usage,\n };\n },\n };\n}\n\nexport function createOpenAIImageClient(config: LLMConfig): ILLMClient {\n const client = new OpenAI(createOpenAIClientOptions(config));\n const model = (config.model as string) ?? \"dall-e-3\";\n\n return {\n id: config.id,\n type: \"image\",\n async chat(): Promise<ChatResult> {\n throw new Error(\"OpenAI image model does not support chat; use generateImage()\");\n },\n async generateImage(options: { prompt: string; size?: string; n?: number }): Promise<ImageResult> {\n const resp = await client.images.generate({\n model,\n prompt: options.prompt,\n size: (options.size as \"1024x1024\" | \"1792x1024\" | \"1024x1792\") ?? \"1024x1024\",\n n: options.n ?? 1,\n response_format: \"url\",\n });\n const url = resp.data?.[0]?.url ?? undefined;\n return { url };\n },\n };\n}\n\nexport function createOpenAIClient(config: LLMConfig): ILLMClient {\n if (config.type === \"image\") return createOpenAIImageClient(config);\n return createOpenAIChatClient(config);\n}\n","/**\n * Supports OpenAI-compatible and extension providers.\n */\n\nimport type { LLMConfig, ILLMClient } from \"../types.js\";\nimport { createOpenAIClient } from \"./openai.js\";\n\nconst OPENAI_COMPATIBLE = \"openai-compatible\";\n\nfunction createOpenAICompat(config: LLMConfig): ILLMClient {\n return createOpenAIClient(config);\n}\n\nconst PROVIDERS: Record<string, (config: LLMConfig) => ILLMClient> = {\n openai: createOpenAICompat,\n [OPENAI_COMPATIBLE]: createOpenAICompat,\n};\n\nexport function createClient(config: LLMConfig): ILLMClient {\n const p = (config.provider ?? \"\").toLowerCase();\n const fn = PROVIDERS[p];\n if (!fn) {\n const supported = [...new Set([...Object.keys(PROVIDERS), \"extension providers\"])].sort().join(\", \");\n throw new Error(\n `Unsupported LLM provider: ${config.provider}. Supported: ${supported}.`\n );\n }\n return fn(config);\n}\n\nexport function registerProvider(name: string, factory: (config: LLMConfig) => ILLMClient): void {\n PROVIDERS[name.toLowerCase()] = factory;\n}\n","/**\n * Create LLM registry from agent.yaml llm section.\n */\n\nimport { parseLlmSection } from \"./config.js\";\nimport { createClient } from \"./providers/index.js\";\nimport type { AgentConfigLlmSection, ILLMClient, ILLMRegistry } from \"./types.js\";\n\nexport interface CreateLLMRegistryOptions {\n /** Parsed llm section (e.g. 
from loadAgentConfig's config.llm) */\n llmSection: AgentConfigLlmSection | null | undefined;\n}\n\n/**\n * Create LLM registry from agent config llm section; supports multiple providers/models, each LLM has id and type.\n */\nexport function createLLMRegistry(options: CreateLLMRegistryOptions): ILLMRegistry {\n const { defaultId, configs } = parseLlmSection(options.llmSection);\n const map = new Map<string, ILLMClient>();\n\n for (const config of configs) {\n try {\n const client = createClient(config);\n map.set(config.id, client);\n } catch (err) {\n console.warn(`[agent-llm] Skip LLM \"${config.id}\": ${err instanceof Error ? err.message : String(err)}`);\n }\n }\n\n return {\n get(id: string): ILLMClient | undefined {\n return map.get(id);\n },\n defaultId(): string | undefined {\n if (map.has(defaultId)) return defaultId;\n return map.size > 0 ? [...map.keys()][0] : undefined;\n },\n ids(): string[] {\n return [...map.keys()];\n },\n };\n}\n","/**\n * Registry for LangChain ChatModel by provider name.\n * Extensions register via registerChatModelProvider; llmAdapter uses getChatModelFactory.\n */\n\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\nimport type { LLMConfig } from \"./types.js\";\n\nexport type ChatModelFactory = (config: LLMConfig) => BaseChatModel;\n\nconst CHAT_MODEL_FACTORIES = new Map<string, ChatModelFactory>();\n\n/**\n * Register a ChatModel factory for a provider name.\n * Called by extensions (e.g. wallee-llm) on load.\n */\nexport function registerChatModelProvider(providerName: string, factory: ChatModelFactory): void {\n CHAT_MODEL_FACTORIES.set(providerName.toLowerCase(), factory);\n}\n\n/**\n * Get the ChatModel factory for a provider name, if registered.\n */\nexport function getChatModelFactory(providerName: string): ChatModelFactory | undefined {\n return CHAT_MODEL_FACTORIES.get(providerName.toLowerCase());\n}\n","/**\n * Build LangChain ChatModel from agent.yaml llm section.\n * Supports single object, default + instances, and flat keyed configs.\n * When provider is registered by an extension, uses that extension's ChatModel;\n * otherwise uses ChatOpenAI.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\nimport { parseLlmSection } from \"./config.js\";\nimport { getChatModelFactory } from \"./chatModelRegistry.js\";\n\nconst DEFAULT_MODEL = \"gpt-4o-mini\";\n\nexport interface CreateChatModelFromLlmConfigOptions {\n /** agent.yaml llm section (raw or parsed); compatible with AgentConfigLlmSection / AgentConfigLlm */\n llmSection?: unknown;\n /** Override model from env */\n modelEnv?: string;\n /** Override API key from env */\n apiKeyEnv?: string;\n}\n\n/**\n * Create a LangChain ChatModel from agent config llm section.\n * Uses extension-registered ChatModel when available; otherwise ChatOpenAI.\n */\nexport function createChatModelFromLlmConfig(\n options: CreateChatModelFromLlmConfigOptions\n): BaseChatModel {\n const { llmSection, modelEnv, apiKeyEnv } = options;\n const { defaultId, configs } = parseLlmSection(llmSection ?? null);\n const defaultConfig = configs.find((c) => c.id === defaultId) ?? configs[0];\n\n if (!defaultConfig) {\n const model =\n modelEnv ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;\n const apiKey = apiKeyEnv ?? process.env.OPENAI_API_KEY;\n return new ChatOpenAI({\n model,\n temperature: 0,\n ...(apiKey ? 
{ apiKey } : {}),\n });\n }\n\n const provider = (defaultConfig as { provider?: string }).provider ?? \"openai\";\n const chatModelFactory = getChatModelFactory(provider);\n if (chatModelFactory) {\n const config = {\n ...defaultConfig,\n model: modelEnv ?? defaultConfig.model,\n temperature:\n typeof defaultConfig.temperature === \"number\"\n ? defaultConfig.temperature\n : 0,\n };\n return chatModelFactory(config);\n }\n\n const model =\n modelEnv ??\n defaultConfig?.model ??\n process.env.OPENAI_MODEL ??\n DEFAULT_MODEL;\n\n let apiKey =\n apiKeyEnv ?? defaultConfig?.apiKey ?? process.env.OPENAI_API_KEY;\n let baseURL = defaultConfig?.baseURL;\n // OpenAI client appends path (e.g. /chat/completions) to baseURL; Ollama and OpenAI-compatible APIs expect /v1/chat/completions.\n if (baseURL && !baseURL.replace(/\\/$/, \"\").endsWith(\"/v1\")) {\n baseURL = baseURL.replace(/\\/$/, \"\") + \"/v1\";\n }\n // OpenAI client throws if apiKey is undefined; Ollama and many compatible endpoints accept a dummy.\n if (baseURL && apiKey === undefined) {\n apiKey = \"ollama\";\n }\n\n const temperature =\n typeof defaultConfig?.temperature === \"number\" ? defaultConfig.temperature : 0;\n\n const constructorOptions: ConstructorParameters<typeof ChatOpenAI>[0] = {\n model,\n temperature,\n ...(apiKey ? { apiKey } : {}),\n ...(baseURL ? { configuration: { baseURL } } : {}),\n };\n\n return new ChatOpenAI(constructorOptions);\n}\n","/**\n * npm: protocol in provider — specify an npm package name (and optional version) in config's model provider.\n * Format: npm:<package-name> or npm:<package-name>@<version> or npm:<package-name>#<provider-name> or npm:<package-name>@<version>#<provider-name>\n * When installNpmIfMissing is true, the framework will run npm install <package>[@version] if the package is not found.\n */\n\nimport { execSync } from \"node:child_process\";\nimport { loadLLMExtensions } from \"./loadLLMExtensions.js\";\nimport { createChatModelFromLlmConfig } from \"./llmAdapter.js\";\nimport type { CreateChatModelFromLlmConfigOptions } from \"./llmAdapter.js\";\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\n\nexport const NPM_PROTOCOL_PREFIX = \"npm:\";\n\n/** Parse npm: spec into package name, optional version, and optional #provider fragment. */\nexport function parseNpmProviderSpec(spec: string): { packageName: string; version?: string; provider?: string } | null {\n if (!isNpmProviderSpec(spec)) return null;\n const rest = spec.slice(NPM_PROTOCOL_PREFIX.length);\n const hashIdx = rest.indexOf(\"#\");\n const beforeHash = hashIdx >= 0 ? rest.slice(0, hashIdx).trim() : rest.trim();\n const fragmentProvider = hashIdx >= 0 ? rest.slice(hashIdx + 1).trim() : null;\n // Extract version: last @ not part of scoped package (e.g. pkg@0.1.0 or @scope/pkg@1.0.0)\n const versionMatch = beforeHash.match(/@([^/]+)$/);\n const packageName = versionMatch ? beforeHash.slice(0, -versionMatch[0].length).trim() : beforeHash;\n const version = versionMatch ? versionMatch[1] : undefined;\n if (!packageName) return null;\n return { packageName, version, provider: fragmentProvider ?? undefined };\n}\n\nexport interface EnsureNpmPackageInstalledOptions {\n /** Version to install (e.g. 0.1.0, latest). If set, runs npm install <package>@<version>. */\n version?: string;\n /** Working directory for npm install. 
Default: process.cwd() */\n cwd?: string;\n}\n\n/**\n * Check if a provider spec uses the npm: protocol.\n */\nexport function isNpmProviderSpec(spec: unknown): spec is string {\n return typeof spec === \"string\" && spec.startsWith(NPM_PROTOCOL_PREFIX);\n}\n\nfunction isModuleNotFoundError(err: unknown): boolean {\n const msg = err instanceof Error ? err.message : String(err);\n return (\n msg.includes(\"Cannot find module\") ||\n msg.includes(\"Cannot find package\") ||\n msg.includes(\"MODULE_NOT_FOUND\") ||\n msg.includes(\"ERR_MODULE_NOT_FOUND\")\n );\n}\n\n/**\n * Ensure an npm package is installed: if it cannot be resolved, run npm install <packageName>[@version] from cwd.\n * Call this before loading a provider package when the config uses npm:<package-name>[@version].\n */\nexport async function ensureNpmPackageInstalled(\n packageName: string,\n options: EnsureNpmPackageInstalledOptions = {}\n): Promise<void> {\n const cwd = options.cwd ?? process.cwd();\n const version = options.version;\n try {\n await import(/* @vite-ignore */ packageName);\n return;\n } catch (err) {\n if (!isModuleNotFoundError(err)) throw err;\n }\n const installSpec = version ? `${packageName}@${version}` : packageName;\n execSync(`npm install ${installSpec}`, {\n cwd,\n stdio: \"inherit\",\n encoding: \"utf-8\",\n });\n}\n\nexport interface ResolveNpmProviderOptions {\n /** If true (default), run npm install <package> when the package is not found. */\n installNpmIfMissing?: boolean;\n /** Working directory for npm install. Default: process.cwd() */\n cwd?: string;\n}\n\n/**\n * Resolve an npm provider spec to the concrete provider name.\n * - npm:wallee-llm → load wallee-llm, use its default provider (e.g. cis)\n * - npm:wallee-llm@0.1.0 → load wallee-llm@0.1.0, use its default provider\n * - npm:wallee-llm#cis → load wallee-llm, use provider \"cis\"\n * - npm:wallee-llm@0.1.0#cis → load wallee-llm@0.1.0, use provider \"cis\"\n * When installNpmIfMissing is true, installs the package (with optional version) if not found.\n * Returns the provider name to use, or null if spec is not npm: protocol.\n */\nexport async function resolveNpmProvider(\n spec: string,\n options: ResolveNpmProviderOptions = {}\n): Promise<string | null> {\n const parsed = parseNpmProviderSpec(spec);\n if (!parsed) return null;\n const { packageName, version, provider: fragmentProvider } = parsed;\n\n const installNpmIfMissing = options.installNpmIfMissing !== false;\n const cwd = options.cwd ?? process.cwd();\n\n const load = async (): Promise<void> => {\n await loadLLMExtensions([packageName]);\n };\n\n try {\n await load();\n } catch (err) {\n if (installNpmIfMissing && isModuleNotFoundError(err)) {\n await ensureNpmPackageInstalled(packageName, { version, cwd });\n await load();\n } else {\n throw err;\n }\n }\n\n if (fragmentProvider) return fragmentProvider;\n\n try {\n const m = await import(/* @vite-ignore */ packageName);\n if (\n typeof (m as { getDefaultProviderName?: () => string }).getDefaultProviderName === \"function\"\n ) {\n return (m as { getDefaultProviderName: () => string }).getDefaultProviderName();\n }\n } catch {\n // ignore\n }\n throw new Error(\n `Provider spec ${spec} has no #provider fragment and the package does not export getDefaultProviderName(). Use e.g. 
npm:${packageName}#<provider-name>.`\n );\n}\n\nexport interface ResolveLlmSectionWithNpmOptions extends ResolveNpmProviderOptions {}\n\n/**\n * Recursively resolve all provider values that use the npm: protocol in a clone of the llm section.\n * When installNpmIfMissing is true, installs any npm: package that is not found.\n */\nexport async function resolveLlmSectionWithNpm(\n llmSection: unknown,\n options: ResolveLlmSectionWithNpmOptions = {}\n): Promise<unknown> {\n if (llmSection == null) return llmSection;\n if (Array.isArray(llmSection)) {\n const out: unknown[] = [];\n for (const item of llmSection) {\n out.push(await resolveLlmSectionWithNpm(item, options));\n }\n return out;\n }\n if (typeof llmSection === \"object\") {\n const out: Record<string, unknown> = {};\n for (const [k, v] of Object.entries(llmSection)) {\n if (k === \"provider\" && isNpmProviderSpec(v)) {\n const resolved = await resolveNpmProvider(v as string, options);\n out[k] = resolved ?? v;\n continue;\n }\n out[k] = await resolveLlmSectionWithNpm(v, options);\n }\n return out;\n }\n return llmSection;\n}\n\nexport interface CreateChatModelFromLlmConfigWithNpmOptions\n extends CreateChatModelFromLlmConfigOptions,\n ResolveNpmProviderOptions {}\n\n/**\n * Create a LangChain ChatModel from llm section, resolving any provider values that use the npm: protocol.\n * Use when config has provider: \"npm:wallee-llm\" or provider: \"npm:wallee-llm#cis\".\n * When installNpmIfMissing is true (default), the framework will run npm install <package> if the package is not found.\n */\nexport async function createChatModelFromLlmConfigWithNpm(\n options: CreateChatModelFromLlmConfigWithNpmOptions\n): Promise<BaseChatModel> {\n const { installNpmIfMissing, cwd, ...rest } = options;\n const resolvedSection = await resolveLlmSectionWithNpm(options.llmSection ?? null, {\n installNpmIfMissing,\n cwd,\n });\n return createChatModelFromLlmConfig({\n ...rest,\n llmSection: resolvedSection,\n });\n}\n","/**\n * Load optional LLM extensions by npm package name (e.g. wallee-llm).\n * Call before createChatModelFromLlmConfig when using extension providers.\n * Config llm.type = npm package name(s); we dynamic load those packages. No extensions field.\n */\n\nconst loadedPackages = new Set<string>();\n\nconst DEFAULT_EXTENSIONS = [\"wallee-llm\"];\n\n/**\n * Resolve llm.type to a list of npm package names to load.\n * type is the npm package name or array of package names; we load them directly (no mapping).\n */\nexport function resolveLLMExtensionPackages(types?: string | string[]): string[] {\n const typeList = types == null ? [] : Array.isArray(types) ? types : [types];\n const packages = typeList.filter(\n (t): t is string => typeof t === \"string\" && t.length > 0\n );\n return packages.length > 0 ? packages : DEFAULT_EXTENSIONS;\n}\n\n/**\n * Dynamically load LLM extensions by npm package name.\n * Each package must export registerLLMExtension() and will register its provider(s) and ChatModel factory.\n * Safe to call multiple times; each package is loaded at most once.\n * @param extensionPackages npm package names; default [\"wallee-llm\"] when omitted\n */\nexport async function loadLLMExtensions(\n extensionPackages?: string[]\n): Promise<void> {\n const packages = extensionPackages ?? 
DEFAULT_EXTENSIONS;\n for (const pkg of packages) {\n if (loadedPackages.has(pkg)) continue;\n loadedPackages.add(pkg);\n try {\n const m = await import(/* @vite-ignore */ pkg);\n if (\n typeof (m as { registerLLMExtension?: () => void })\n .registerLLMExtension === \"function\"\n ) {\n (m as { registerLLMExtension: () => void }).registerLLMExtension();\n }\n } catch {\n // extension not installed or load failed\n }\n }\n}\n","/**\n * Return a LangChain-formatted LLM from config (llm.yaml or config/llm.yaml).\n * Use this LLM with LangChain's createAgent (e.g. createToolCallingAgent + AgentExecutor).\n */\nimport { join } from \"node:path\";\nimport { existsSync } from \"node:fs\";\nimport { createChatModelFromLlmConfig } from \"./llmAdapter.js\";\nimport { createChatModelFromLlmConfigWithNpm } from \"./npmProviderProtocol.js\";\nimport { loadLlmConfig } from \"./loadLlmConfig.js\";\n\nexport interface CreateAgentLlMOptions {\n /** Path to YAML config file. If omitted, uses llm.yaml in cwd or config/llm.yaml in cwd/parent. */\n configPath?: string;\n}\n\nexport interface CreateAgentLlMAsyncOptions extends CreateAgentLlMOptions {\n /** If true (default), run npm install when provider is npm:<package> and package is not found. */\n installNpmIfMissing?: boolean;\n}\n\nfunction resolveDefaultConfigPath(): string {\n const cwd = process.cwd();\n if (existsSync(join(cwd, \"llm.yaml\"))) return join(cwd, \"llm.yaml\");\n if (existsSync(join(cwd, \"config\", \"llm.yaml\"))) return join(cwd, \"config\", \"llm.yaml\");\n const parentConfig = join(cwd, \"..\", \"config\", \"llm.yaml\");\n if (existsSync(parentConfig)) return parentConfig;\n return join(cwd, \"config\", \"llm.yaml\");\n}\n\n/**\n * Create a LangChain-formatted LLM from config.\n * Pass configPath to use a specific YAML file; otherwise uses llm.yaml (cwd) or config/llm.yaml (cwd/parent).\n */\nexport function createAgentLlM(options: CreateAgentLlMOptions = {}) {\n const configPath = options.configPath ?? resolveDefaultConfigPath();\n const llmSection = loadLlmConfig(configPath);\n if (llmSection == null) {\n throw new Error(`No LLM config at ${configPath}. Add llm.yaml or config/llm.yaml, or pass configPath.`);\n }\n return createChatModelFromLlmConfig({ llmSection });\n}\n\n/**\n * Create a LangChain-formatted LLM from config, resolving npm: providers and installing packages if missing.\n * Use when your config has provider: \"npm:wallee-llm\" or similar.\n */\nexport async function createAgentLlMAsync(\n options: CreateAgentLlMAsyncOptions = {}\n): Promise<ReturnType<typeof createChatModelFromLlmConfig>> {\n const configPath = options.configPath ?? resolveDefaultConfigPath();\n const llmSection = loadLlmConfig(configPath);\n if (llmSection == null) {\n throw new Error(`No LLM config at ${configPath}. 
Add llm.yaml or config/llm.yaml, or pass configPath.`);\n }\n return createChatModelFromLlmConfigWithNpm({\n llmSection,\n installNpmIfMissing: options.installNpmIfMissing !== false,\n cwd: process.cwd(),\n });\n}\n"],"mappings":";AAOA,IAAM,iBAAiB;AAEvB,IAAM,gBAAgB,oBAAI,IAAI;AAAA,EAC5B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAKM,SAAS,gBAAgB,SAA+D;AAC7F,MAAI,WAAW,QAAQ,OAAO,YAAY,UAAU;AAClD,WAAO,EAAE,WAAW,gBAAgB,SAAS,CAAC,EAAE;AAAA,EAClD;AAEA,MAAI,MAAM,QAAQ,OAAO,GAAG;AAC1B,UAAM,UAAU,QACb,OAAO,CAAC,MAAoC,KAAK,QAAQ,OAAO,MAAM,QAAQ,EAC9E,IAAI,CAAC,MAAM,MAAM,mBAAmB,EAAE,GAAG,MAAM,IAAI,KAAK,MAAM,KAAK,QAAQ,OAAO,CAAC,EAAE,CAAC,CAAC,EACvF,OAAO,CAAC,MAAsB,KAAK,IAAI;AAC1C,UAAM,YAAY,QAAQ,SAAS,IAAI,QAAQ,CAAC,EAAG,KAAK;AACxD,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,QAAM,IAAI;AAEV,QAAM,cAAc,OAAO,QAAQ,CAAC,EAAE;AAAA,IACpC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,cAAc,IAAI,CAAC,KAAK,KAAK,QAAQ,OAAO,MAAM,YAAY,CAAC,MAAM,QAAQ,CAAC;AAAA,EAC7F;AACA,MAAI,YAAY,SAAS,GAAG;AAC1B,UAAM,UAAuB,CAAC;AAC9B,eAAW,CAAC,IAAI,KAAK,KAAK,aAAa;AACrC,YAAM,IAAI,iBAAiB,IAAI,KAAgC;AAC/D,UAAI,EAAG,SAAQ,KAAK,CAAC;AAAA,IACvB;AACA,UAAM,YACJ,OAAO,EAAE,YAAY,YAAY,EAAE,WAAW,YAAY,KAAK,CAAC,CAAC,CAAC,MAAM,MAAM,EAAE,OAAO,IACnF,EAAE,UACF,QAAQ,SAAS,IACf,QAAQ,CAAC,EAAG,KACZ;AACR,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,MAAI,MAAM,QAAQ,EAAE,SAAS,GAAG;AAC9B,UAAM,UAAW,EAAE,UAChB,OAAO,CAAC,MAAoC,KAAK,QAAQ,OAAO,MAAM,QAAQ,EAC9E,IAAI,CAAC,MAAM,mBAAmB,CAAC,CAAC,EAChC,OAAO,CAAC,MAAsB,KAAK,IAAI;AAC1C,UAAM,YACJ,OAAO,EAAE,YAAY,YAAY,EAAE,UAC/B,EAAE,UACF,QAAQ,SAAS,IACf,QAAQ,CAAC,EAAG,KACZ;AACR,WAAO,EAAE,WAAW,QAAQ;AAAA,EAC9B;AAEA,MAAI,OAAO,EAAE,aAAa,YAAY,OAAO,EAAE,UAAU,YAAY,OAAQ,EAAwB,SAAS,UAAU;AACtH,UAAM,MAAM,wBAAwB,CAAC;AACrC,WAAO,EAAE,WAAW,IAAI,IAAI,SAAS,CAAC,GAAG,EAAE;AAAA,EAC7C;AAEA,SAAO,EAAE,WAAW,gBAAgB,SAAS,CAAC,EAAE;AAClD;AAEA,IAAM,wBAAwB,CAAC,cAAc,UAAU,aAAa,aAAa,cAAc,QAAQ,eAAe,aAAa,SAAS;AAE5I,SAAS,iBAAiB,IAAY,OAAkD;AACtF,QAAM,OAAO,MAAM;AACnB,QAAM,UACJ,OAAO,MAAM,aAAa,WACtB,MAAM,WACN,OAAO,MAAM,YAAY,WACvB,MAAM,UACN;AACR,QAAM,QAAQ,OAAO,MAAM,SAAS,WAAW,MAAM,OAAO,OAAO,MAAM,UAAU,WAAW,MAAM,QAAQ;AAC5G,QAAM,WAAW,OAAO,MAAM,aAAa,YAAY,MAAM,WAAW,MAAM,WAAW;AACzF,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,aAAa,OAAO,MAAM,gBAAgB,WAAW,KAAK,cAAc,OAAO,MAAM,gBAAgB,WAAW,MAAM,cAAc;AAAA,IACpI,QAAQ,OAAO,MAAM,WAAW,WAAW,KAAK,SAAS,OAAO,MAAM,WAAW,WAAW,MAAM,SAAS;AAAA,IAC3G;AAAA,EACF;AACA,MAAI,OAAO,MAAM,SAAS,YAAY,MAAM,SAAS,QAAS,QAAO,OAAO;AAC5E,MAAI,QAAQ,OAAO,SAAS,SAAU,CAAC,OAAmC,UAAU;AACpF,aAAW,KAAK,uBAAuB;AACrC,QAAI,MAAM,CAAC,MAAM,OAAW,CAAC,OAAmC,CAAC,IAAI,MAAM,CAAC;AAAA,aACnE,QAAQ,KAAK,CAAC,MAAM,OAAW,CAAC,OAAmC,CAAC,IAAI,KAAK,CAAC;AAAA,EACzF;AACA,SAAO;AACT;AAEA,SAAS,wBAAwB,GAAuC;AACtE,QAAM,MAAiB;AAAA,IACrB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,UAAU,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,IACxD,OAAO,OAAO,EAAE,UAAU,WAAW,EAAE,QAAS,OAAQ,EAAwB,SAAS,WAAY,EAAuB,OAAO;AAAA,IACnI,aAAa,OAAO,EAAE,gBAAgB,WAAW,EAAE,cAAc;AAAA,IACjE,QAAQ,OAAO,EAAE,WAAW,WAAW,EAAE,SAAS;AAAA,IAClD,SACE,OAAO,EAAE,YAAY,WAAW,EAAE,UAAU,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,EAC9F;AACA,SAAO,KAAK,CAAC,EAAE,QAAQ,CAAC,MAAM;AAC5B,QAAI,CAAC,CAAC,MAAM,QAAQ,YAAY,SAAS,QAAQ,eAAe,UAAU,WAAW,YAAY,WAAW,WAAW,EAAE,SAAS,CAAC,GAAG;AACpI,MAAC,IAAgC,CAAC,IAAI,EAAE,CAAC;AAAA,IAC3C;AAAA,EACF,CAAC;AACD,SAAO;AACT;AAEA,SAAS,mBAAmB,GAA8C;AACxE,QAAM,KAAK,OAAO,EAAE,OAAO,YAAY,EAAE,KAAK,EAAE,KAAK;AACrD,QAAM,OAAO,EAAE,SAAS,UAAU,UAAU;AAC5C,QAAM,WAAW,OAAO,EAAE,aAAa,YAAY,EAAE,WAAW,EAAE,WAAW;AAC7E,QAAM,OAAO,EAAE;AACf,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,OAAO,EAAE,UAAU,WAAW,EAAE,QAAS,OAA
O,EAAE,SAAS,WAAW,EAAE,OAAO;AAAA,IACtF,aACE,OAAO,EAAE,gBAAgB,WACrB,EAAE,cACF,OAAO,MAAM,gBAAgB,WAC3B,KAAK,cACL;AAAA,IACR,QACE,OAAO,EAAE,WAAW,WAChB,EAAE,SACF,OAAO,MAAM,WAAW,WACtB,KAAK,SACL;AAAA,IACR,SAAS,OAAO,EAAE,YAAY,WAAW,EAAE,UAAW,OAAO,EAAE,aAAa,WAAW,EAAE,WAAW;AAAA,EACtG;AACA,SAAO,KAAK,CAAC,EAAE,QAAQ,CAAC,MAAM;AAC5B,QAAI,CAAC,CAAC,MAAM,QAAQ,YAAY,SAAS,QAAQ,eAAe,UAAU,WAAW,UAAU,EAAE,SAAS,CAAC,GAAG;AAC5G,MAAC,OAAmC,CAAC,IAAI,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF,CAAC;AACD,SAAO;AACT;;;AC5JA,SAAS,cAAc,kBAAkB;AACzC,SAAS,SAAS,iBAAiB;AAU5B,SAAS,cAAc,KAAuB;AACnD,MAAI,QAAQ,QAAQ,QAAQ,OAAW,QAAO;AAC9C,MAAI,OAAO,QAAQ,UAAU;AAC3B,UAAM,IAAI,IAAI,MAAM,eAAe;AACnC,WAAO,IAAK,QAAQ,IAAI,EAAE,CAAC,CAAC,KAAK,MAAO;AAAA,EAC1C;AACA,MAAI,MAAM,QAAQ,GAAG,EAAG,QAAO,IAAI,IAAI,aAAa;AACpD,MAAI,OAAO,QAAQ,UAAU;AAC3B,UAAM,MAA+B,CAAC;AACtC,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,GAAG,EAAG,KAAI,CAAC,IAAI,cAAc,CAAC;AAClE,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAMO,SAAS,aACd,SACA,UAAgC,CAAC,GACxB;AACT,QAAM,EAAE,eAAe,QAAQ,KAAK,IAAI;AACxC,QAAM,SAAS,UAAU,OAAO;AAChC,QAAM,MAAM,QAAQ;AACpB,MAAI,OAAO,KAAM,QAAO;AACxB,SAAO,QAAQ,cAAc,GAAG,IAAI;AACtC;AAOO,SAAS,cACd,UACA,UAAgC,CAAC,GACjB;AAChB,MAAI,CAAC,WAAW,QAAQ,EAAG,QAAO;AAClC,MAAI;AACF,UAAM,MAAM,aAAa,UAAU,MAAM;AACzC,UAAM,MAAM,aAAa,KAAK,OAAO;AACrC,WAAO,OAAO;AAAA,EAChB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AC1DA,OAAO,YAAY;AAYnB,SAAS,UAAU,QAA2B;AAC5C,QAAM,MAAM,OAAO,UAAU,QAAQ,IAAI,kBAAkB;AAC3D,MAAI,CAAC,IAAK,OAAM,IAAI,MAAM,qEAAqE;AAC/F,SAAO;AACT;AAEA,SAAS,0BAA0B,QAAyD;AAC1F,QAAM,OAA6C,EAAE,QAAQ,UAAU,MAAM,EAAE;AAC/E,MAAI,OAAO,OAAO,YAAY,YAAY,OAAO,QAAS,MAAK,UAAU,OAAO;AAChF,SAAO;AACT;AAEA,SAAS,iBACP,GACoD;AACpD,MAAI,EAAE,SAAS;AACb,WAAO,EAAE,MAAM,QAAQ,SAAS,EAAE,SAAS,cAAc,EAAE,aAAa;AAC1E,MAAI,EAAE,SAAS,eAAe,gBAAgB,KAAK,EAAE,YAAY,QAAQ;AACvE,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS,EAAE,WAAW;AAAA,MACtB,YAAY,EAAE,WAAW,IAAI,CAAC,QAAQ;AAAA,QACpC,IAAI,GAAG;AAAA,QACP,MAAM;AAAA,QACN,UAAU,EAAE,MAAM,GAAG,SAAS,MAAM,WAAW,GAAG,SAAS,UAAU;AAAA,MACvE,EAAE;AAAA,IACJ;AAAA,EACF;AACA,SAAO,EAAE,MAAM,EAAE,MAAM,SAAU,EAAkB,QAAQ;AAC7D;AAEO,SAAS,uBAAuB,QAA+B;AACpE,QAAM,SAAS,IAAI,OAAO,0BAA0B,MAAM,CAAC;AAC3D,QAAM,QAAQ,OAAO,SAAS,QAAQ,IAAI,gBAAgB;AAC1D,QAAM,cAAc,OAAO,eAAe;AAE1C,SAAO;AAAA,IACL,IAAI,OAAO;AAAA,IACX,MAAM;AAAA,IACN,MAAM,KAAK,UAA8C;AACvD,YAAM,OAAO,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,QAChD;AAAA,QACA;AAAA,QACA,UAAU,SAAS,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,EAAE;AAAA,MACtE,CAAC;AACD,YAAM,UAAU,KAAK,QAAQ,CAAC,GAAG,SAAS,WAAW;AACrD,YAAM,QAAQ,KAAK,QACf,EAAE,cAAc,KAAK,MAAM,eAAe,kBAAkB,KAAK,MAAM,kBAAkB,IACzF;AACJ,aAAO,EAAE,SAAS,MAAM;AAAA,IAC1B;AAAA,IACA,MAAM,cACJ,UACA,OACA,UAC8B;AAC9B,YAAM,OAAO,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,QAChD;AAAA,QACA;AAAA,QACA,UAAU,SAAS,IAAI,gBAAgB;AAAA,QACvC,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,UACvB,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,EAAE,SAAS;AAAA,YACjB,aAAa,EAAE,SAAS;AAAA,YACxB,YAAa,EAAE,SAAS,cAAc;AAAA,UACxC;AAAA,QACF,EAAE;AAAA,MACJ,CAAC;AACD,YAAM,MAAM,KAAK,QAAQ,CAAC,GAAG;AAC7B,YAAM,QAAQ,KAAK,QACf,EAAE,cAAc,KAAK,MAAM,eAAe,kBAAkB,KAAK,MAAM,kBAAkB,IACzF;AACJ,aAAO;AAAA,QACL,SAAS;AAAA,UACP,MAAM;AAAA,UACN,SAAS,KAAK,WAAW;AAAA,UACzB,YAAY,KAAK,YAAY,IAAI,CAAC,QAAQ;AAAA,YACxC,IAAI,GAAG;AAAA,YACP,MAAM;AAAA,YACN,UAAU;AAAA,cACR,MAAM,GAAG,UAAU,QAAQ;AAAA,cAC3B,WAAW,GAAG,UAAU,aAAa;AAAA,YACvC;AAAA,UACF,EAAE;AAAA,QACJ;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,wBAAwB,QAA+B;AACrE,QAAM,SAAS,IAAI,OAAO,0BAA0B,MAAM,CAAC;AAC3D,QAAM,QAAS,OAAO,SAAoB;AAE1C,SAAO;AAAA,IACL,IAAI,OAAO;AAAA,IACX,MAAM;AAAA,IACN,MAAM,OAA4B;AAChC,YAAM,IAAI,MAAM,+DAA+D;AAAA,IACjF;AAAA,IACA,MAAM,cAAc,SAA8E;AAChG,YAAM,OAAO,MAAM,OAAO,OAAO,SAAS;AAAA,QACxC;AAAA,QACA,QAAQ,QAAQ;AAAA,
QAChB,MAAO,QAAQ,QAAoD;AAAA,QACnE,GAAG,QAAQ,KAAK;AAAA,QAChB,iBAAiB;AAAA,MACnB,CAAC;AACD,YAAM,MAAM,KAAK,OAAO,CAAC,GAAG,OAAO;AACnC,aAAO,EAAE,IAAI;AAAA,IACf;AAAA,EACF;AACF;AAEO,SAAS,mBAAmB,QAA+B;AAChE,MAAI,OAAO,SAAS,QAAS,QAAO,wBAAwB,MAAM;AAClE,SAAO,uBAAuB,MAAM;AACtC;;;ACjIA,IAAM,oBAAoB;AAE1B,SAAS,mBAAmB,QAA+B;AACzD,SAAO,mBAAmB,MAAM;AAClC;AAEA,IAAM,YAA+D;AAAA,EACnE,QAAQ;AAAA,EACR,CAAC,iBAAiB,GAAG;AACvB;AAEO,SAAS,aAAa,QAA+B;AAC1D,QAAM,KAAK,OAAO,YAAY,IAAI,YAAY;AAC9C,QAAM,KAAK,UAAU,CAAC;AACtB,MAAI,CAAC,IAAI;AACP,UAAM,YAAY,CAAC,GAAG,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,SAAS,GAAG,qBAAqB,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,IAAI;AACnG,UAAM,IAAI;AAAA,MACR,6BAA6B,OAAO,QAAQ,gBAAgB,SAAS;AAAA,IACvE;AAAA,EACF;AACA,SAAO,GAAG,MAAM;AAClB;AAEO,SAAS,iBAAiB,MAAc,SAAkD;AAC/F,YAAU,KAAK,YAAY,CAAC,IAAI;AAClC;;;AChBO,SAAS,kBAAkB,SAAiD;AACjF,QAAM,EAAE,WAAW,QAAQ,IAAI,gBAAgB,QAAQ,UAAU;AACjE,QAAM,MAAM,oBAAI,IAAwB;AAExC,aAAW,UAAU,SAAS;AAC5B,QAAI;AACF,YAAM,SAAS,aAAa,MAAM;AAClC,UAAI,IAAI,OAAO,IAAI,MAAM;AAAA,IAC3B,SAAS,KAAK;AACZ,cAAQ,KAAK,yBAAyB,OAAO,EAAE,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,IACzG;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,IAAoC;AACtC,aAAO,IAAI,IAAI,EAAE;AAAA,IACnB;AAAA,IACA,YAAgC;AAC9B,UAAI,IAAI,IAAI,SAAS,EAAG,QAAO;AAC/B,aAAO,IAAI,OAAO,IAAI,CAAC,GAAG,IAAI,KAAK,CAAC,EAAE,CAAC,IAAI;AAAA,IAC7C;AAAA,IACA,MAAgB;AACd,aAAO,CAAC,GAAG,IAAI,KAAK,CAAC;AAAA,IACvB;AAAA,EACF;AACF;;;AC/BA,IAAM,uBAAuB,oBAAI,IAA8B;AAMxD,SAAS,0BAA0B,cAAsB,SAAiC;AAC/F,uBAAqB,IAAI,aAAa,YAAY,GAAG,OAAO;AAC9D;AAKO,SAAS,oBAAoB,cAAoD;AACtF,SAAO,qBAAqB,IAAI,aAAa,YAAY,CAAC;AAC5D;;;AClBA,SAAS,kBAAkB;AAK3B,IAAM,gBAAgB;AAef,SAAS,6BACd,SACe;AACf,QAAM,EAAE,YAAY,UAAU,UAAU,IAAI;AAC5C,QAAM,EAAE,WAAW,QAAQ,IAAI,gBAAgB,cAAc,IAAI;AACjE,QAAM,gBAAgB,QAAQ,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,KAAK,QAAQ,CAAC;AAE1E,MAAI,CAAC,eAAe;AAClB,UAAMA,SACJ,YAAY,QAAQ,IAAI,gBAAgB;AAC1C,UAAMC,UAAS,aAAa,QAAQ,IAAI;AACxC,WAAO,IAAI,WAAW;AAAA,MACpB,OAAAD;AAAA,MACA,aAAa;AAAA,MACb,GAAIC,UAAS,EAAE,QAAAA,QAAO,IAAI,CAAC;AAAA,IAC7B,CAAC;AAAA,EACH;AAEA,QAAM,WAAY,cAAwC,YAAY;AACtE,QAAM,mBAAmB,oBAAoB,QAAQ;AACrD,MAAI,kBAAkB;AACpB,UAAM,SAAS;AAAA,MACb,GAAG;AAAA,MACH,OAAO,YAAY,cAAc;AAAA,MACjC,aACE,OAAO,cAAc,gBAAgB,WACjC,cAAc,cACd;AAAA,IACR;AACA,WAAO,iBAAiB,MAAM;AAAA,EAChC;AAEA,QAAM,QACJ,YACA,eAAe,SACf,QAAQ,IAAI,gBACZ;AAEF,MAAI,SACF,aAAa,eAAe,UAAU,QAAQ,IAAI;AACpD,MAAI,UAAU,eAAe;AAE7B,MAAI,WAAW,CAAC,QAAQ,QAAQ,OAAO,EAAE,EAAE,SAAS,KAAK,GAAG;AAC1D,cAAU,QAAQ,QAAQ,OAAO,EAAE,IAAI;AAAA,EACzC;AAEA,MAAI,WAAW,WAAW,QAAW;AACnC,aAAS;AAAA,EACX;AAEA,QAAM,cACJ,OAAO,eAAe,gBAAgB,WAAW,cAAc,cAAc;AAE/E,QAAM,qBAAkE;AAAA,IACtE;AAAA,IACA;AAAA,IACA,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC3B,GAAI,UAAU,EAAE,eAAe,EAAE,QAAQ,EAAE,IAAI,CAAC;AAAA,EAClD;AAEA,SAAO,IAAI,WAAW,kBAAkB;AAC1C;;;AClFA,SAAS,gBAAgB;;;ACAzB,IAAM,iBAAiB,oBAAI,IAAY;AAEvC,IAAM,qBAAqB,CAAC,YAAY;AAMjC,SAAS,4BAA4B,OAAqC;AAC/E,QAAM,WAAW,SAAS,OAAO,CAAC,IAAI,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,KAAK;AAC3E,QAAM,WAAW,SAAS;AAAA,IACxB,CAAC,MAAmB,OAAO,MAAM,YAAY,EAAE,SAAS;AAAA,EAC1D;AACA,SAAO,SAAS,SAAS,IAAI,WAAW;AAC1C;AAQA,eAAsB,kBACpB,mBACe;AACf,QAAM,WAAW,qBAAqB;AACtC,aAAW,OAAO,UAAU;AAC1B,QAAI,eAAe,IAAI,GAAG,EAAG;AAC7B,mBAAe,IAAI,GAAG;AACtB,QAAI;AACF,YAAM,IAAI,MAAM;AAAA;AAAA,QAA0B;AAAA;AAC1C,UACE,OAAQ,EACL,yBAAyB,YAC5B;AACA,QAAC,EAA2C,qBAAqB;AAAA,MACnE;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AACF;;;ADnCO,IAAM,sBAAsB;AAG5B,SAAS,qBAAqB,MAAmF;AACtH,MAAI,CAAC,kBAAkB,IAAI,EAAG,QAAO;AACrC,QAAM,OAAO,KAAK,MAAM,oBAAoB,MAAM;AAClD,QAAM,UAAU,KAAK,QAAQ,GAAG;AAChC,QAAM,aAAa,WAAW,IAAI,KAAK,MAAM,GAAG,OAAO,EAAE,KAAK,IAAI,KAAK,KAAK;AAC5E,QAAM,mBAAmB,WAAW,IAAI,KAAK,MAAM,UAAU,CAAC,EAAE,KAAK,IAAI;AAEzE,QAAM,eAAe,WAAW,MAAM,WA
AW;AACjD,QAAM,cAAc,eAAe,WAAW,MAAM,GAAG,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI;AACzF,QAAM,UAAU,eAAe,aAAa,CAAC,IAAI;AACjD,MAAI,CAAC,YAAa,QAAO;AACzB,SAAO,EAAE,aAAa,SAAS,UAAU,oBAAoB,OAAU;AACzE;AAYO,SAAS,kBAAkB,MAA+B;AAC/D,SAAO,OAAO,SAAS,YAAY,KAAK,WAAW,mBAAmB;AACxE;AAEA,SAAS,sBAAsB,KAAuB;AACpD,QAAM,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC3D,SACE,IAAI,SAAS,oBAAoB,KACjC,IAAI,SAAS,qBAAqB,KAClC,IAAI,SAAS,kBAAkB,KAC/B,IAAI,SAAS,sBAAsB;AAEvC;AAMA,eAAsB,0BACpB,aACA,UAA4C,CAAC,GAC9B;AACf,QAAM,MAAM,QAAQ,OAAO,QAAQ,IAAI;AACvC,QAAM,UAAU,QAAQ;AACxB,MAAI;AACF,UAAM;AAAA;AAAA,MAA0B;AAAA;AAChC;AAAA,EACF,SAAS,KAAK;AACZ,QAAI,CAAC,sBAAsB,GAAG,EAAG,OAAM;AAAA,EACzC;AACA,QAAM,cAAc,UAAU,GAAG,WAAW,IAAI,OAAO,KAAK;AAC5D,WAAS,eAAe,WAAW,IAAI;AAAA,IACrC;AAAA,IACA,OAAO;AAAA,IACP,UAAU;AAAA,EACZ,CAAC;AACH;AAkBA,eAAsB,mBACpB,MACA,UAAqC,CAAC,GACd;AACxB,QAAM,SAAS,qBAAqB,IAAI;AACxC,MAAI,CAAC,OAAQ,QAAO;AACpB,QAAM,EAAE,aAAa,SAAS,UAAU,iBAAiB,IAAI;AAE7D,QAAM,sBAAsB,QAAQ,wBAAwB;AAC5D,QAAM,MAAM,QAAQ,OAAO,QAAQ,IAAI;AAEvC,QAAM,OAAO,YAA2B;AACtC,UAAM,kBAAkB,CAAC,WAAW,CAAC;AAAA,EACvC;AAEA,MAAI;AACF,UAAM,KAAK;AAAA,EACb,SAAS,KAAK;AACZ,QAAI,uBAAuB,sBAAsB,GAAG,GAAG;AACrD,YAAM,0BAA0B,aAAa,EAAE,SAAS,IAAI,CAAC;AAC7D,YAAM,KAAK;AAAA,IACb,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI,iBAAkB,QAAO;AAE7B,MAAI;AACF,UAAM,IAAI,MAAM;AAAA;AAAA,MAA0B;AAAA;AAC1C,QACE,OAAQ,EAAgD,2BAA2B,YACnF;AACA,aAAQ,EAA+C,uBAAuB;AAAA,IAChF;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR,iBAAiB,IAAI,qGAAqG,WAAW;AAAA,EACvI;AACF;AAQA,eAAsB,yBACpB,YACA,UAA2C,CAAC,GAC1B;AAClB,MAAI,cAAc,KAAM,QAAO;AAC/B,MAAI,MAAM,QAAQ,UAAU,GAAG;AAC7B,UAAM,MAAiB,CAAC;AACxB,eAAW,QAAQ,YAAY;AAC7B,UAAI,KAAK,MAAM,yBAAyB,MAAM,OAAO,CAAC;AAAA,IACxD;AACA,WAAO;AAAA,EACT;AACA,MAAI,OAAO,eAAe,UAAU;AAClC,UAAM,MAA+B,CAAC;AACtC,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,UAAU,GAAG;AAC/C,UAAI,MAAM,cAAc,kBAAkB,CAAC,GAAG;AAC5C,cAAM,WAAW,MAAM,mBAAmB,GAAa,OAAO;AAC9D,YAAI,CAAC,IAAI,YAAY;AACrB;AAAA,MACF;AACA,UAAI,CAAC,IAAI,MAAM,yBAAyB,GAAG,OAAO;AAAA,IACpD;AACA,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAWA,eAAsB,oCACpB,SACwB;AACxB,QAAM,EAAE,qBAAqB,KAAK,GAAG,KAAK,IAAI;AAC9C,QAAM,kBAAkB,MAAM,yBAAyB,QAAQ,cAAc,MAAM;AAAA,IACjF;AAAA,IACA;AAAA,EACF,CAAC;AACD,SAAO,6BAA6B;AAAA,IAClC,GAAG;AAAA,IACH,YAAY;AAAA,EACd,CAAC;AACH;;;AE1LA,SAAS,YAAY;AACrB,SAAS,cAAAC,mBAAkB;AAe3B,SAAS,2BAAmC;AAC1C,QAAM,MAAM,QAAQ,IAAI;AACxB,MAAIC,YAAW,KAAK,KAAK,UAAU,CAAC,EAAG,QAAO,KAAK,KAAK,UAAU;AAClE,MAAIA,YAAW,KAAK,KAAK,UAAU,UAAU,CAAC,EAAG,QAAO,KAAK,KAAK,UAAU,UAAU;AACtF,QAAM,eAAe,KAAK,KAAK,MAAM,UAAU,UAAU;AACzD,MAAIA,YAAW,YAAY,EAAG,QAAO;AACrC,SAAO,KAAK,KAAK,UAAU,UAAU;AACvC;AAMO,SAAS,eAAe,UAAiC,CAAC,GAAG;AAClE,QAAM,aAAa,QAAQ,cAAc,yBAAyB;AAClE,QAAM,aAAa,cAAc,UAAU;AAC3C,MAAI,cAAc,MAAM;AACtB,UAAM,IAAI,MAAM,oBAAoB,UAAU,wDAAwD;AAAA,EACxG;AACA,SAAO,6BAA6B,EAAE,WAAW,CAAC;AACpD;AAMA,eAAsB,oBACpB,UAAsC,CAAC,GACmB;AAC1D,QAAM,aAAa,QAAQ,cAAc,yBAAyB;AAClE,QAAM,aAAa,cAAc,UAAU;AAC3C,MAAI,cAAc,MAAM;AACtB,UAAM,IAAI,MAAM,oBAAoB,UAAU,wDAAwD;AAAA,EACxG;AACA,SAAO,oCAAoC;AAAA,IACzC;AAAA,IACA,qBAAqB,QAAQ,wBAAwB;AAAA,IACrD,KAAK,QAAQ,IAAI;AAAA,EACnB,CAAC;AACH;","names":["model","apiKey","existsSync","existsSync"]}