@easynet/agent-llm 1.0.49 → 1.0.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94) hide show
  1. package/README.md +18 -6
  2. package/dist/api/create-agent-llm.d.ts +2 -2
  3. package/dist/api/create-agent-llm.d.ts.map +1 -1
  4. package/dist/api/create-embed-fn.d.ts +10 -0
  5. package/dist/api/create-embed-fn.d.ts.map +1 -0
  6. package/dist/api/get-default-llm-config.d.ts +3 -3
  7. package/dist/api/get-default-llm-config.d.ts.map +1 -1
  8. package/dist/chunk-36PPGV4O.js +1 -0
  9. package/dist/chunk-36PPGV4O.js.map +1 -0
  10. package/dist/chunk-6EQCGQTV.js +154 -0
  11. package/dist/chunk-6EQCGQTV.js.map +1 -0
  12. package/dist/chunk-7VIJ4MFU.js +139 -0
  13. package/dist/chunk-7VIJ4MFU.js.map +1 -0
  14. package/dist/chunk-AUQEXHUP.js +105 -0
  15. package/dist/chunk-AUQEXHUP.js.map +1 -0
  16. package/dist/chunk-FVQ3PKER.js +354 -0
  17. package/dist/chunk-FVQ3PKER.js.map +1 -0
  18. package/dist/chunk-FZWXYGR6.js +191 -0
  19. package/dist/chunk-FZWXYGR6.js.map +1 -0
  20. package/dist/chunk-G7MKWPEI.js +14 -0
  21. package/dist/chunk-G7MKWPEI.js.map +1 -0
  22. package/dist/chunk-KE7IMUSA.js +223 -0
  23. package/dist/chunk-KE7IMUSA.js.map +1 -0
  24. package/dist/chunk-SPDXNDDD.js +114 -0
  25. package/dist/chunk-SPDXNDDD.js.map +1 -0
  26. package/dist/chunk-WK6P4EOC.js +147 -0
  27. package/dist/chunk-WK6P4EOC.js.map +1 -0
  28. package/dist/cli/index.js +18 -3
  29. package/dist/cli/index.js.map +1 -1
  30. package/dist/config/index.d.ts +1 -0
  31. package/dist/config/index.d.ts.map +1 -1
  32. package/dist/config/index.js +40 -0
  33. package/dist/config/index.js.map +1 -0
  34. package/dist/config/loader.d.ts +15 -0
  35. package/dist/config/loader.d.ts.map +1 -1
  36. package/dist/config/parser.d.ts +2 -2
  37. package/dist/config/parser.d.ts.map +1 -1
  38. package/dist/config/yaml-utils.d.ts +13 -0
  39. package/dist/config/yaml-utils.d.ts.map +1 -0
  40. package/dist/connectivity/check.d.ts +18 -0
  41. package/dist/connectivity/check.d.ts.map +1 -0
  42. package/dist/connectivity/index.d.ts +3 -0
  43. package/dist/connectivity/index.d.ts.map +1 -0
  44. package/dist/connectivity/index.js +11 -0
  45. package/dist/connectivity/index.js.map +1 -0
  46. package/dist/connectivity/types.d.ts +13 -0
  47. package/dist/connectivity/types.d.ts.map +1 -0
  48. package/dist/extensions/index.js +36 -0
  49. package/dist/extensions/index.js.map +1 -0
  50. package/dist/extensions/npm-protocol.d.ts +1 -1
  51. package/dist/extensions/npm-protocol.d.ts.map +1 -1
  52. package/dist/index.d.ts +4 -11
  53. package/dist/index.d.ts.map +1 -1
  54. package/dist/index.js +46 -26
  55. package/dist/index.js.map +1 -1
  56. package/dist/langchain/index.js +11 -0
  57. package/dist/langchain/index.js.map +1 -0
  58. package/dist/model/chat.d.ts +30 -0
  59. package/dist/model/chat.d.ts.map +1 -0
  60. package/dist/model/embed-parser.d.ts +25 -0
  61. package/dist/model/embed-parser.d.ts.map +1 -0
  62. package/dist/model/embedding.d.ts +25 -0
  63. package/dist/model/embedding.d.ts.map +1 -0
  64. package/dist/model/hub.d.ts +26 -3
  65. package/dist/model/hub.d.ts.map +1 -1
  66. package/dist/model/index.d.ts +13 -0
  67. package/dist/model/index.d.ts.map +1 -0
  68. package/dist/model/index.js +17 -0
  69. package/dist/model/index.js.map +1 -0
  70. package/dist/model/llm-parser.d.ts +10 -0
  71. package/dist/model/llm-parser.d.ts.map +1 -0
  72. package/dist/model/types.d.ts +31 -0
  73. package/dist/model/types.d.ts.map +1 -0
  74. package/dist/npm/command.d.ts +37 -0
  75. package/dist/npm/command.d.ts.map +1 -0
  76. package/dist/npm/index.d.ts +5 -0
  77. package/dist/npm/index.d.ts.map +1 -0
  78. package/dist/npm/index.js +39 -0
  79. package/dist/npm/index.js.map +1 -0
  80. package/dist/npm/install.d.ts +9 -0
  81. package/dist/npm/install.d.ts.map +1 -0
  82. package/dist/npm/provider.d.ts +15 -0
  83. package/dist/npm/provider.d.ts.map +1 -0
  84. package/dist/npm/version.d.ts +12 -0
  85. package/dist/npm/version.d.ts.map +1 -0
  86. package/dist/registry/chat-model.d.ts +1 -1
  87. package/dist/registry/chat-model.d.ts.map +1 -1
  88. package/dist/registry/index.js +9 -0
  89. package/dist/registry/index.js.map +1 -0
  90. package/dist/types.d.ts +2 -2
  91. package/dist/types.d.ts.map +1 -1
  92. package/package.json +40 -2
  93. package/dist/chunk-YK4EBPJ3.js +0 -578
  94. package/dist/chunk-YK4EBPJ3.js.map +0 -1
package/README.md CHANGED
@@ -1,11 +1,11 @@
1
- # @easynet/agent-llm
1
+ # @easynet/agent-model
2
2
 
3
- 最小目标:从 `llm.yaml` 创建一个可直接给 LangChain 使用的 ChatModel。
3
+ 统一的 Model Hub:从 `models.yaml` 创建 LangChain ChatModel、Embedding 等模型。
4
4
 
5
5
  ## 最小接口
6
6
 
7
7
  ```ts
8
- import { createAgentLlM } from "@easynet/agent-llm";
8
+ import { createAgentLlM } from "@easynet/agent-model";
9
9
 
10
10
  const llm = await createAgentLlM();
11
11
  const result = await llm.invoke("hello");
@@ -14,7 +14,7 @@ console.log(result.content);
14
14
 
15
15
  ## 最小 YAML(推荐先用这个)
16
16
 
17
- `llm.yaml`
17
+ `models.yaml`
18
18
 
19
19
  ```yaml
20
20
  llm:
@@ -32,15 +32,26 @@ llm:
32
32
  provider: openai
33
33
  base_url: http://localhost:11434/v1
34
34
  model: qwen3:0.6b
35
+
36
+ embed:
37
+ default: gemma
38
+ gemma:
39
+ provider: openai
40
+ base_url: https://ollama-nvidia-8g-2.easynet.world/v1
41
+ model: embeddinggemma:latest
42
+ apiKey: ollama
43
+
44
+ runtime:
45
+ check_connectivity: false
35
46
  ```
36
47
 
37
48
  ## 最简单示例(带注释)
38
49
 
39
50
  ```ts
40
- import { createAgentLlM } from "@easynet/agent-llm";
51
+ import { createAgentLlM } from "@easynet/agent-model";
41
52
 
42
53
  async function main() {
43
- // 1) 从当前目录 llm.yaml 读取配置并创建模型
54
+ // 1) 从当前目录 models.yaml 读取配置并创建模型
44
55
  const llm = await createAgentLlM();
45
56
 
46
57
  // 2) 直接调用模型
@@ -58,3 +69,4 @@ main().catch(console.error);
58
69
  - 自定义 provider:`registerChatModelProvider(...)`
59
70
  - 只取默认模型配置:`getDefaultLlmConfig(...)`
60
71
  - 直接从对象构建模型:`createChatModelFromLlmConfig(...)`
72
+ - 从 models.yaml 创建 EmbedFn:`createEmbedFnFromModelsConfig(...)`
@@ -1,5 +1,5 @@
1
1
  import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
2
- import { type ConnectionStatus } from "@easynet/agent-common";
2
+ import { type ConnectionStatus } from "../connectivity/index.js";
3
3
  export interface CreateAgentLlMOptions {
4
4
  configPath?: string;
5
5
  installNpmIfMissing?: boolean;
@@ -8,7 +8,7 @@ export interface CreateAgentLlMOptions {
8
8
  connectivityTimeoutMs?: number;
9
9
  }
10
10
  /**
11
- * Create a LangChain ChatModel from llm.yaml config.
11
+ * Create a LangChain ChatModel from models.yaml config.
12
12
  * Returns BaseChatModel compatible with LangChain's createAgent and other tools.
13
13
  */
14
14
  export declare function createAgentLlM(configPathOrOptions?: string | CreateAgentLlMOptions): Promise<BaseChatModel>;
@@ -1 +1 @@
1
- {"version":3,"file":"create-agent-llm.d.ts","sourceRoot":"","sources":["../../src/api/create-agent-llm.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6CAA6C,CAAC;AACjF,OAAO,EAML,KAAK,gBAAgB,EACtB,MAAM,uBAAuB,CAAC;AA0D/B,MAAM,WAAW,qBAAqB;IACpC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,kBAAkB,CAAC,EAAE,CAAC,MAAM,EAAE,gBAAgB,KAAK,IAAI,CAAC;IACxD,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAChC;AAgFD;;;GAGG;AACH,wBAAsB,cAAc,CAClC,mBAAmB,CAAC,EAAE,MAAM,GAAG,qBAAqB,GACnD,OAAO,CAAC,aAAa,CAAC,CA+BxB"}
1
+ {"version":3,"file":"create-agent-llm.d.ts","sourceRoot":"","sources":["../../src/api/create-agent-llm.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6CAA6C,CAAC;AACjF,OAAO,EAIL,KAAK,gBAAgB,EACtB,MAAM,0BAA0B,CAAC;AA4DlC,MAAM,WAAW,qBAAqB;IACpC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,kBAAkB,CAAC,EAAE,CAAC,MAAM,EAAE,gBAAgB,KAAK,IAAI,CAAC;IACxD,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAChC;AAgFD;;;GAGG;AACH,wBAAsB,cAAc,CAClC,mBAAmB,CAAC,EAAE,MAAM,GAAG,qBAAqB,GACnD,OAAO,CAAC,aAAa,CAAC,CAmCxB"}
@@ -0,0 +1,10 @@
1
+ export type EmbedFn = (input: string | string[]) => Promise<number[][]>;
2
+ /**
3
+ * Load models.yaml and build an EmbedFn from the `embed:` section.
4
+ * Returns undefined if no embed section is configured.
5
+ *
6
+ * @param configPath - Path to models.yaml
7
+ * @param embedId - Which embed instance to use (defaults to the `default:` key)
8
+ */
9
+ export declare function createEmbedFnFromModelsConfig(configPath: string, embedId?: string): EmbedFn | undefined;
10
+ //# sourceMappingURL=create-embed-fn.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"create-embed-fn.d.ts","sourceRoot":"","sources":["../../src/api/create-embed-fn.ts"],"names":[],"mappings":"AAOA,MAAM,MAAM,OAAO,GAAG,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,KAAK,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;AAExE;;;;;;GAMG;AACH,wBAAgB,6BAA6B,CAC3C,UAAU,EAAE,MAAM,EAClB,OAAO,CAAC,EAAE,MAAM,GACf,OAAO,GAAG,SAAS,CAsBrB"}
@@ -1,16 +1,16 @@
1
1
  /**
2
- * Load llm.yaml, resolve npm providers, and return the default LLMConfig.
2
+ * Load models.yaml, resolve npm providers, and return the default LLMConfig.
3
3
  * For use by CLIs (e.g. wallee-llm) that need config without creating a model.
4
4
  */
5
5
  import type { LLMConfig } from "../types.js";
6
6
  export interface GetDefaultLlmConfigOptions {
7
- /** Path to llm.yaml. Default: process.cwd() + "/llm.yaml" */
7
+ /** Path to models.yaml. Default: process.cwd() + "/models.yaml" */
8
8
  configPath?: string;
9
9
  /** Install npm provider packages if missing. Default true. */
10
10
  installNpmIfMissing?: boolean;
11
11
  }
12
12
  /**
13
- * Returns the default LLM config from llm.yaml (after resolving npm: providers), or null if no config file.
13
+ * Returns the default LLM config from models.yaml (after resolving npm: providers), or null if no config file.
14
14
  */
15
15
  export declare function getDefaultLlmConfig(options?: GetDefaultLlmConfigOptions): Promise<LLMConfig | null>;
16
16
  //# sourceMappingURL=get-default-llm-config.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"get-default-llm-config.d.ts","sourceRoot":"","sources":["../../src/api/get-default-llm-config.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAE7C,MAAM,WAAW,0BAA0B;IACzC,6DAA6D;IAC7D,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,8DAA8D;IAC9D,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC/B;AAED;;GAEG;AACH,wBAAsB,mBAAmB,CACvC,OAAO,GAAE,0BAA+B,GACvC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,CAa3B"}
1
+ {"version":3,"file":"get-default-llm-config.d.ts","sourceRoot":"","sources":["../../src/api/get-default-llm-config.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAE7C,MAAM,WAAW,0BAA0B;IACzC,mEAAmE;IACnE,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,8DAA8D;IAC9D,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC/B;AAED;;GAEG;AACH,wBAAsB,mBAAmB,CACvC,OAAO,GAAE,0BAA+B,GACvC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,CAa3B"}
@@ -0,0 +1 @@
1
+ //# sourceMappingURL=chunk-36PPGV4O.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
@@ -0,0 +1,154 @@
1
+ import {
2
+ getChatModelFactory
3
+ } from "./chunk-G7MKWPEI.js";
4
+ import {
5
+ parseLlmSection
6
+ } from "./chunk-SPDXNDDD.js";
7
+
8
+ // src/langchain/openai-compatible.ts
9
+ import { ChatOpenAI } from "@langchain/openai";
10
+ function createChatOpenAI(options) {
11
+ const {
12
+ baseURL,
13
+ model,
14
+ temperature = 0,
15
+ apiKey,
16
+ defaultHeaders,
17
+ defaultQuery,
18
+ httpAgent
19
+ } = options;
20
+ const config = {};
21
+ if (baseURL) config.baseURL = baseURL;
22
+ if (defaultHeaders) config.defaultHeaders = defaultHeaders;
23
+ if (defaultQuery) config.defaultQuery = defaultQuery;
24
+ if (httpAgent) config.httpAgent = httpAgent;
25
+ return new ChatOpenAI({
26
+ model,
27
+ temperature,
28
+ ...apiKey ? { apiKey } : {},
29
+ ...Object.keys(config).length > 0 ? { configuration: config } : {}
30
+ });
31
+ }
32
+
33
+ // src/langchain/tool-choice.ts
34
+ function hasTools(model, options) {
35
+ const optTools = options?.tools;
36
+ if (Array.isArray(optTools) && optTools.length > 0) return true;
37
+ const defaultTools = model.defaultOptions?.tools;
38
+ return Array.isArray(defaultTools) && defaultTools.length > 0;
39
+ }
40
+ function sanitizeOptions(model, options) {
41
+ if (!options) return options;
42
+ if (options.tool_choice !== "none") return options;
43
+ if (!hasTools(model, options)) return options;
44
+ return { ...options, tool_choice: "auto" };
45
+ }
46
+ function applyToolChoiceAuto(model) {
47
+ const m = model;
48
+ if (m.__agentLlmToolChoicePatched) return;
49
+ m.__agentLlmToolChoicePatched = true;
50
+ const origBindTools = m.bindTools?.bind(model);
51
+ if (origBindTools) {
52
+ m.bindTools = function(tools, opts) {
53
+ const bound = origBindTools(tools, { ...opts, tool_choice: "auto" });
54
+ applyToolChoiceAuto(
55
+ bound
56
+ );
57
+ return bound;
58
+ };
59
+ }
60
+ const origWithConfig = m.withConfig?.bind(model);
61
+ if (origWithConfig) {
62
+ m.withConfig = function(config) {
63
+ const sanitized = sanitizeOptions(this, config) ?? config;
64
+ const next = origWithConfig(sanitized);
65
+ applyToolChoiceAuto(
66
+ next
67
+ );
68
+ return next;
69
+ };
70
+ }
71
+ const origInvoke = m.invoke?.bind(model);
72
+ if (origInvoke) {
73
+ m.invoke = function(input, options) {
74
+ return origInvoke(input, sanitizeOptions(this, options));
75
+ };
76
+ }
77
+ const origStream = m.stream?.bind(model);
78
+ if (origStream) {
79
+ m.stream = function(input, options) {
80
+ return origStream(input, sanitizeOptions(this, options));
81
+ };
82
+ }
83
+ }
84
+
85
+ // src/langchain/index.ts
86
+ var DEFAULT_MODEL = "gpt-4o-mini";
87
+ function normalizeError(e, context) {
88
+ if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });
89
+ return new Error(`${context}: ${String(e)}`);
90
+ }
91
+ function createChatModelFromLlmConfig(options = {}) {
92
+ const { llmSection, modelEnv, apiKeyEnv } = options;
93
+ let defaultId;
94
+ let configs;
95
+ try {
96
+ const parsed = parseLlmSection(llmSection ?? null);
97
+ defaultId = parsed.defaultId;
98
+ configs = parsed.configs;
99
+ } catch (e) {
100
+ throw normalizeError(e, "Failed to parse llm section");
101
+ }
102
+ const config = configs.find((c) => c.id === defaultId) ?? configs[0];
103
+ if (!config) {
104
+ const model2 = modelEnv ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;
105
+ const apiKey2 = apiKeyEnv ?? process.env.OPENAI_API_KEY;
106
+ return createChatOpenAI({
107
+ model: model2,
108
+ temperature: 0,
109
+ ...apiKey2 ? { apiKey: apiKey2 } : {}
110
+ });
111
+ }
112
+ const provider = config.provider ?? "openai";
113
+ const factory = getChatModelFactory(provider);
114
+ if (factory) {
115
+ try {
116
+ return factory({
117
+ ...config,
118
+ model: modelEnv ?? config.model ?? DEFAULT_MODEL,
119
+ temperature: typeof config.temperature === "number" ? config.temperature : 0
120
+ });
121
+ } catch (e) {
122
+ throw normalizeError(e, `Failed to create ChatModel for provider "${provider}"`);
123
+ }
124
+ }
125
+ const model = modelEnv ?? config.model ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;
126
+ let apiKey = apiKeyEnv ?? config.apiKey ?? process.env.OPENAI_API_KEY;
127
+ let baseURL = config.baseURL;
128
+ if (baseURL && !baseURL.replace(/\/$/, "").endsWith("/v1")) {
129
+ baseURL = baseURL.replace(/\/$/, "") + "/v1";
130
+ }
131
+ if (baseURL && !apiKey) {
132
+ apiKey = "not-needed";
133
+ }
134
+ const temperature = typeof config.temperature === "number" ? config.temperature : 0;
135
+ const opts = config.options;
136
+ const defaultHeaders = opts?.defaultHeaders;
137
+ const defaultQuery = opts?.defaultQuery;
138
+ const httpAgent = opts?.httpAgent;
139
+ return createChatOpenAI({
140
+ model,
141
+ temperature,
142
+ baseURL,
143
+ apiKey,
144
+ defaultHeaders,
145
+ defaultQuery,
146
+ httpAgent
147
+ });
148
+ }
149
+
150
+ export {
151
+ applyToolChoiceAuto,
152
+ createChatModelFromLlmConfig
153
+ };
154
+ //# sourceMappingURL=chunk-6EQCGQTV.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/langchain/openai-compatible.ts","../src/langchain/tool-choice.ts","../src/langchain/index.ts"],"sourcesContent":["/**\n * Create ChatOpenAI from config - works with any OpenAI-compatible provider.\n * This is the ONLY place we create ChatOpenAI instances.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\nimport type { Agent } from \"node:http\";\n\nexport interface CreateChatOpenAIOptions {\n baseURL?: string;\n model: string;\n temperature?: number;\n apiKey?: string;\n defaultHeaders?: Record<string, string>;\n defaultQuery?: Record<string, string>;\n httpAgent?: Agent;\n}\n\n/**\n * Create ChatOpenAI from options.\n * Works with OpenAI and any OpenAI-compatible endpoint (CIS, Ollama, etc.).\n */\nexport function createChatOpenAI(options: CreateChatOpenAIOptions): BaseChatModel {\n const {\n baseURL,\n model,\n temperature = 0,\n apiKey,\n defaultHeaders,\n defaultQuery,\n httpAgent,\n } = options;\n\n const config: {\n baseURL?: string;\n defaultHeaders?: Record<string, string>;\n defaultQuery?: Record<string, string>;\n httpAgent?: Agent;\n } = {};\n\n if (baseURL) config.baseURL = baseURL;\n if (defaultHeaders) config.defaultHeaders = defaultHeaders;\n if (defaultQuery) config.defaultQuery = defaultQuery;\n if (httpAgent) config.httpAgent = httpAgent;\n\n return new ChatOpenAI({\n model,\n temperature,\n ...(apiKey ? { apiKey } : {}),\n ...(Object.keys(config).length > 0 ? 
{ configuration: config } : {}),\n });\n}\n","interface ToolChoicePatchTarget {\n __agentLlmToolChoicePatched?: boolean;\n defaultOptions?: { tools?: unknown[] };\n bindTools?: (tools: unknown, opts?: Record<string, unknown>) => unknown;\n withConfig?: (config: Record<string, unknown>) => unknown;\n invoke?: (input: unknown, options?: Record<string, unknown>) => unknown;\n stream?: (input: unknown, options?: Record<string, unknown>) => unknown;\n}\n\nfunction hasTools(model: ToolChoicePatchTarget, options?: Record<string, unknown>): boolean {\n const optTools = options?.tools;\n if (Array.isArray(optTools) && optTools.length > 0) return true;\n const defaultTools = model.defaultOptions?.tools;\n return Array.isArray(defaultTools) && defaultTools.length > 0;\n}\n\nfunction sanitizeOptions(\n model: ToolChoicePatchTarget,\n options?: Record<string, unknown>\n): Record<string, unknown> | undefined {\n if (!options) return options;\n if (options.tool_choice !== \"none\") return options;\n if (!hasTools(model, options)) return options;\n return { ...options, tool_choice: \"auto\" };\n}\n\n/**\n * Force tool_choice to \"auto\" when tools are present.\n * Patches bindTools/withConfig/invoke/stream in-place for compatibility with providers\n * that default to tool_choice: \"none\".\n */\nexport function applyToolChoiceAuto(\n model: { bindTools?: (tools: unknown, opts?: Record<string, unknown>) => unknown }\n): void {\n const m = model as ToolChoicePatchTarget;\n if (m.__agentLlmToolChoicePatched) return;\n m.__agentLlmToolChoicePatched = true;\n\n const origBindTools = m.bindTools?.bind(model);\n if (origBindTools) {\n m.bindTools = function (tools: unknown, opts?: Record<string, unknown>) {\n const bound = origBindTools(tools, { ...opts, tool_choice: \"auto\" });\n applyToolChoiceAuto(\n bound as { bindTools?: (tools: unknown, opts?: Record<string, unknown>) => unknown }\n );\n return bound;\n };\n }\n\n const origWithConfig = m.withConfig?.bind(model);\n if 
(origWithConfig) {\n m.withConfig = function (config: Record<string, unknown>) {\n const sanitized = sanitizeOptions(this as ToolChoicePatchTarget, config) ?? config;\n const next = origWithConfig(sanitized);\n applyToolChoiceAuto(\n next as { bindTools?: (tools: unknown, opts?: Record<string, unknown>) => unknown }\n );\n return next;\n };\n }\n\n const origInvoke = m.invoke?.bind(model);\n if (origInvoke) {\n m.invoke = function (input: unknown, options?: Record<string, unknown>) {\n return origInvoke(input, sanitizeOptions(this as ToolChoicePatchTarget, options));\n };\n }\n\n const origStream = m.stream?.bind(model);\n if (origStream) {\n m.stream = function (input: unknown, options?: Record<string, unknown>) {\n return origStream(input, sanitizeOptions(this as ToolChoicePatchTarget, options));\n };\n }\n}\n","/**\n * Simple LangChain module: create ChatOpenAI from llm config.\n * Extensions can register custom ChatModel factories via the registry.\n */\n\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\nimport { parseLlmSection } from \"../model/llm-parser.js\";\nimport type { LLMConfig } from \"../model/types.js\";\nimport { getChatModelFactory } from \"../registry/chat-model.js\";\nimport { createChatOpenAI } from \"./openai-compatible.js\";\nimport type { Agent } from \"node:http\";\n\nconst DEFAULT_MODEL = \"gpt-4o-mini\";\n\nexport interface CreateChatModelOptions {\n llmSection?: unknown;\n modelEnv?: string;\n apiKeyEnv?: string;\n}\n\nexport { applyToolChoiceAuto } from \"./tool-choice.js\";\n\nfunction normalizeError(e: unknown, context: string): Error {\n if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });\n return new Error(`${context}: ${String(e)}`);\n}\n\n/**\n * Create a LangChain ChatModel from agent config llm section.\n * Uses extension-registered factory when available; otherwise creates ChatOpenAI.\n */\nexport function createChatModelFromLlmConfig(\n options: 
CreateChatModelOptions = {}\n): BaseChatModel {\n const { llmSection, modelEnv, apiKeyEnv } = options;\n\n let defaultId: string;\n let configs: LLMConfig[];\n\n try {\n const parsed = parseLlmSection(llmSection ?? null);\n defaultId = parsed.defaultId;\n configs = parsed.configs;\n } catch (e) {\n throw normalizeError(e, \"Failed to parse llm section\");\n }\n\n const config = configs.find((c) => c.id === defaultId) ?? configs[0];\n\n // No config? Use default OpenAI\n if (!config) {\n const model = modelEnv ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;\n const apiKey = apiKeyEnv ?? process.env.OPENAI_API_KEY;\n\n return createChatOpenAI({\n model,\n temperature: 0,\n ...(apiKey ? { apiKey } : {}),\n });\n }\n\n // Check for registered custom factory\n const provider = config.provider ?? \"openai\";\n const factory = getChatModelFactory(provider);\n\n if (factory) {\n try {\n return factory({\n ...config,\n model: modelEnv ?? config.model ?? DEFAULT_MODEL,\n temperature: typeof config.temperature === \"number\" ? config.temperature : 0,\n });\n } catch (e) {\n throw normalizeError(e, `Failed to create ChatModel for provider \"${provider}\"`);\n }\n }\n\n // Create standard ChatOpenAI for OpenAI-compatible provider\n const model = modelEnv ?? config.model ?? process.env.OPENAI_MODEL ?? DEFAULT_MODEL;\n let apiKey = apiKeyEnv ?? config.apiKey ?? process.env.OPENAI_API_KEY;\n let baseURL = config.baseURL;\n\n // Ensure baseURL ends with /v1\n if (baseURL && !baseURL.replace(/\\/$/, \"\").endsWith(\"/v1\")) {\n baseURL = baseURL.replace(/\\/$/, \"\") + \"/v1\";\n }\n\n // For local providers without API keys, use a placeholder\n if (baseURL && !apiKey) {\n apiKey = \"not-needed\";\n }\n\n const temperature = typeof config.temperature === \"number\" ? 
config.temperature : 0;\n\n // Extract options\n const opts = config.options as Record<string, unknown> | undefined;\n const defaultHeaders = opts?.defaultHeaders as Record<string, string> | undefined;\n const defaultQuery = opts?.defaultQuery as Record<string, string> | undefined;\n const httpAgent = opts?.httpAgent as Agent | undefined;\n\n return createChatOpenAI({\n model,\n temperature,\n baseURL,\n apiKey,\n defaultHeaders,\n defaultQuery,\n httpAgent,\n });\n}\n"],"mappings":";;;;;;;;AAKA,SAAS,kBAAkB;AAkBpB,SAAS,iBAAiB,SAAiD;AAChF,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,QAAM,SAKF,CAAC;AAEL,MAAI,QAAS,QAAO,UAAU;AAC9B,MAAI,eAAgB,QAAO,iBAAiB;AAC5C,MAAI,aAAc,QAAO,eAAe;AACxC,MAAI,UAAW,QAAO,YAAY;AAElC,SAAO,IAAI,WAAW;AAAA,IACpB;AAAA,IACA;AAAA,IACA,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC3B,GAAI,OAAO,KAAK,MAAM,EAAE,SAAS,IAAI,EAAE,eAAe,OAAO,IAAI,CAAC;AAAA,EACpE,CAAC;AACH;;;AC3CA,SAAS,SAAS,OAA8B,SAA4C;AAC1F,QAAM,WAAW,SAAS;AAC1B,MAAI,MAAM,QAAQ,QAAQ,KAAK,SAAS,SAAS,EAAG,QAAO;AAC3D,QAAM,eAAe,MAAM,gBAAgB;AAC3C,SAAO,MAAM,QAAQ,YAAY,KAAK,aAAa,SAAS;AAC9D;AAEA,SAAS,gBACP,OACA,SACqC;AACrC,MAAI,CAAC,QAAS,QAAO;AACrB,MAAI,QAAQ,gBAAgB,OAAQ,QAAO;AAC3C,MAAI,CAAC,SAAS,OAAO,OAAO,EAAG,QAAO;AACtC,SAAO,EAAE,GAAG,SAAS,aAAa,OAAO;AAC3C;AAOO,SAAS,oBACd,OACM;AACN,QAAM,IAAI;AACV,MAAI,EAAE,4BAA6B;AACnC,IAAE,8BAA8B;AAEhC,QAAM,gBAAgB,EAAE,WAAW,KAAK,KAAK;AAC7C,MAAI,eAAe;AACjB,MAAE,YAAY,SAAU,OAAgB,MAAgC;AACtE,YAAM,QAAQ,cAAc,OAAO,EAAE,GAAG,MAAM,aAAa,OAAO,CAAC;AACnE;AAAA,QACE;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,iBAAiB,EAAE,YAAY,KAAK,KAAK;AAC/C,MAAI,gBAAgB;AAClB,MAAE,aAAa,SAAU,QAAiC;AACxD,YAAM,YAAY,gBAAgB,MAA+B,MAAM,KAAK;AAC5E,YAAM,OAAO,eAAe,SAAS;AACrC;AAAA,QACE;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,aAAa,EAAE,QAAQ,KAAK,KAAK;AACvC,MAAI,YAAY;AACd,MAAE,SAAS,SAAU,OAAgB,SAAmC;AACtE,aAAO,WAAW,OAAO,gBAAgB,MAA+B,OAAO,CAAC;AAAA,IAClF;AAAA,EACF;AAEA,QAAM,aAAa,EAAE,QAAQ,KAAK,KAAK;AACvC,MAAI,YAAY;AACd,MAAE,SAAS,SAAU,OAAgB,SAAmC;AACtE,
aAAO,WAAW,OAAO,gBAAgB,MAA+B,OAAO,CAAC;AAAA,IAClF;AAAA,EACF;AACF;;;AC9DA,IAAM,gBAAgB;AAUtB,SAAS,eAAe,GAAY,SAAwB;AAC1D,MAAI,aAAa,MAAO,QAAO,IAAI,MAAM,GAAG,OAAO,KAAK,EAAE,OAAO,IAAI,EAAE,OAAO,EAAE,CAAC;AACjF,SAAO,IAAI,MAAM,GAAG,OAAO,KAAK,OAAO,CAAC,CAAC,EAAE;AAC7C;AAMO,SAAS,6BACd,UAAkC,CAAC,GACpB;AACf,QAAM,EAAE,YAAY,UAAU,UAAU,IAAI;AAE5C,MAAI;AACJ,MAAI;AAEJ,MAAI;AACF,UAAM,SAAS,gBAAgB,cAAc,IAAI;AACjD,gBAAY,OAAO;AACnB,cAAU,OAAO;AAAA,EACnB,SAAS,GAAG;AACV,UAAM,eAAe,GAAG,6BAA6B;AAAA,EACvD;AAEA,QAAM,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,OAAO,SAAS,KAAK,QAAQ,CAAC;AAGnE,MAAI,CAAC,QAAQ;AACX,UAAMA,SAAQ,YAAY,QAAQ,IAAI,gBAAgB;AACtD,UAAMC,UAAS,aAAa,QAAQ,IAAI;AAExC,WAAO,iBAAiB;AAAA,MACtB,OAAAD;AAAA,MACA,aAAa;AAAA,MACb,GAAIC,UAAS,EAAE,QAAAA,QAAO,IAAI,CAAC;AAAA,IAC7B,CAAC;AAAA,EACH;AAGA,QAAM,WAAW,OAAO,YAAY;AACpC,QAAM,UAAU,oBAAoB,QAAQ;AAE5C,MAAI,SAAS;AACX,QAAI;AACF,aAAO,QAAQ;AAAA,QACb,GAAG;AAAA,QACH,OAAO,YAAY,OAAO,SAAS;AAAA,QACnC,aAAa,OAAO,OAAO,gBAAgB,WAAW,OAAO,cAAc;AAAA,MAC7E,CAAC;AAAA,IACH,SAAS,GAAG;AACV,YAAM,eAAe,GAAG,4CAA4C,QAAQ,GAAG;AAAA,IACjF;AAAA,EACF;AAGA,QAAM,QAAQ,YAAY,OAAO,SAAS,QAAQ,IAAI,gBAAgB;AACtE,MAAI,SAAS,aAAa,OAAO,UAAU,QAAQ,IAAI;AACvD,MAAI,UAAU,OAAO;AAGrB,MAAI,WAAW,CAAC,QAAQ,QAAQ,OAAO,EAAE,EAAE,SAAS,KAAK,GAAG;AAC1D,cAAU,QAAQ,QAAQ,OAAO,EAAE,IAAI;AAAA,EACzC;AAGA,MAAI,WAAW,CAAC,QAAQ;AACtB,aAAS;AAAA,EACX;AAEA,QAAM,cAAc,OAAO,OAAO,gBAAgB,WAAW,OAAO,cAAc;AAGlF,QAAM,OAAO,OAAO;AACpB,QAAM,iBAAiB,MAAM;AAC7B,QAAM,eAAe,MAAM;AAC3B,QAAM,YAAY,MAAM;AAExB,SAAO,iBAAiB;AAAA,IACtB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;","names":["model","apiKey"]}
@@ -0,0 +1,139 @@
1
+ import {
2
+ loadModelsConfig
3
+ } from "./chunk-WK6P4EOC.js";
4
+ import {
5
+ buildUnreachableError,
6
+ checkEndpointConnectivity
7
+ } from "./chunk-AUQEXHUP.js";
8
+ import {
9
+ resolveLlmSectionWithNpm
10
+ } from "./chunk-FVQ3PKER.js";
11
+ import {
12
+ createChatModelFromLlmConfig
13
+ } from "./chunk-6EQCGQTV.js";
14
+ import {
15
+ parseLlmSection
16
+ } from "./chunk-SPDXNDDD.js";
17
+
18
+ // src/api/create-agent-llm.ts
19
+ import { join } from "path";
20
+ function applyDefaultToolChoice(model) {
21
+ const m = model;
22
+ const orig = m.bindTools?.bind(model);
23
+ if (!orig) return;
24
+ m.bindTools = function(tools, opts) {
25
+ return orig(tools, { ...opts, tool_choice: "auto" });
26
+ };
27
+ }
28
+ var CIS_DEFAULT_RESOLVE_HOST = "s0010-ml-https.s0010.us-west-2.awswd";
29
+ var CIS_DEFAULT_RESOLVE_IP = "10.210.98.124";
30
+ function buildEndpointConnectivityOptions(config) {
31
+ const opts = config.options ?? config;
32
+ const provider = typeof config.provider === "string" ? config.provider : "";
33
+ const baseURL = config.baseURL;
34
+ const isCis = provider === "cis" || provider.includes("cis");
35
+ const useCisDefault = isCis && baseURL.includes(CIS_DEFAULT_RESOLVE_HOST) && opts?.resolveHost == null;
36
+ const resolveHost = opts?.resolveHost != null && typeof opts.resolveHost.from === "string" ? opts.resolveHost : useCisDefault ? { from: CIS_DEFAULT_RESOLVE_HOST, to: CIS_DEFAULT_RESOLVE_IP } : void 0;
37
+ const host = typeof opts?.host === "string" ? opts.host : resolveHost ? resolveHost.from : void 0;
38
+ if (resolveHost == null && host == null) return void 0;
39
+ const verifySSL = opts?.verifySSL === true;
40
+ const bypassAuth = opts?.bypassAuth !== false;
41
+ return {
42
+ resolveHost,
43
+ host,
44
+ verifySSL: resolveHost != null ? false : verifySSL ? true : void 0,
45
+ bypassAuth: bypassAuth ? true : void 0,
46
+ featureKey: typeof opts?.featureKey === "string" ? opts.featureKey : void 0
47
+ };
48
+ }
49
+ function resolveDefaultConfigPath() {
50
+ return join(process.cwd(), "models.yaml");
51
+ }
52
+ function normalizeOptions(configPathOrOptions) {
53
+ if (configPathOrOptions == null) return {};
54
+ if (typeof configPathOrOptions === "string") return { configPath: configPathOrOptions };
55
+ return configPathOrOptions;
56
+ }
57
+ function normalizeError(e, context) {
58
+ if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });
59
+ return new Error(`${context}: ${String(e)}`);
60
+ }
61
+ async function ensureConnectivity(resolvedLlmSection, options) {
62
+ let configs;
63
+ try {
64
+ const parsed = parseLlmSection(resolvedLlmSection ?? null);
65
+ configs = parsed.configs.filter(
66
+ (c) => typeof c.baseURL === "string" && c.baseURL.length > 0 && (c.baseURL.startsWith("http://") || c.baseURL.startsWith("https://")) && !c.baseURL.includes("${")
67
+ );
68
+ } catch {
69
+ return;
70
+ }
71
+ const shouldCheck = options.checkConnectivity !== false && configs.length > 0;
72
+ if (!shouldCheck) return;
73
+ const report = (status) => options.onConnectionStatus?.(status);
74
+ const timeoutMs = options.connectivityTimeoutMs ?? 8e3;
75
+ for (const config of configs) {
76
+ const { id, baseURL } = config;
77
+ report({
78
+ phase: "checking",
79
+ endpointId: id,
80
+ baseURL,
81
+ message: "Checking connection..."
82
+ });
83
+ const endpointOpts = buildEndpointConnectivityOptions(config);
84
+ const result = await checkEndpointConnectivity(baseURL, {
85
+ timeoutMs,
86
+ ...endpointOpts
87
+ });
88
+ if (result.reachable) {
89
+ report({
90
+ phase: "reachable",
91
+ endpointId: id,
92
+ baseURL,
93
+ message: result.message ?? "Connected"
94
+ });
95
+ } else {
96
+ report({
97
+ phase: "unreachable",
98
+ endpointId: id,
99
+ baseURL,
100
+ message: result.message ?? "Unreachable"
101
+ });
102
+ throw new Error(buildUnreachableError(id, baseURL, result.message));
103
+ }
104
+ }
105
+ }
106
+ async function createAgentLlM(configPathOrOptions) {
107
+ try {
108
+ const options = normalizeOptions(configPathOrOptions);
109
+ const configPath = options.configPath ?? resolveDefaultConfigPath();
110
+ const modelsConfig = loadModelsConfig(configPath);
111
+ if (modelsConfig?.llm == null) {
112
+ throw new Error(
113
+ `No LLM config at ${configPath}. Add models.yaml in the current directory, or pass configPath.`
114
+ );
115
+ }
116
+ const resolvedSection = await resolveLlmSectionWithNpm(modelsConfig.llm, {
117
+ installNpmIfMissing: options.installNpmIfMissing !== false,
118
+ cwd: process.cwd()
119
+ });
120
+ const checkConnectivity = options.checkConnectivity ?? modelsConfig.runtime.check_connectivity;
121
+ await ensureConnectivity(resolvedSection, {
122
+ checkConnectivity,
123
+ onConnectionStatus: options.onConnectionStatus,
124
+ connectivityTimeoutMs: options.connectivityTimeoutMs
125
+ });
126
+ const model = createChatModelFromLlmConfig({ llmSection: resolvedSection });
127
+ applyDefaultToolChoice(model);
128
+ return model;
129
+ } catch (e) {
130
+ if (e instanceof Error && e.message.includes("No LLM config")) throw e;
131
+ if (e instanceof Error && e.message.includes("Cannot connect to")) throw e;
132
+ throw normalizeError(e, "createAgentLlM failed");
133
+ }
134
+ }
135
+
136
+ export {
137
+ createAgentLlM
138
+ };
139
+ //# sourceMappingURL=chunk-7VIJ4MFU.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/api/create-agent-llm.ts"],"sourcesContent":["/**\n * Simple API: create LangChain ChatModel from models.yaml config.\n * Supports OpenAI-compatible providers with optional connectivity check and npm: provider resolution.\n */\nimport { join } from \"node:path\";\nimport type { BaseChatModel } from \"@langchain/core/language_models/chat_models\";\nimport {\n checkEndpointConnectivity,\n buildUnreachableError,\n type EndpointConnectivityOptions,\n type ConnectionStatus,\n} from \"../connectivity/index.js\";\nimport { parseLlmSection } from \"../model/llm-parser.js\";\nimport type { LLMConfig } from \"../model/types.js\";\nimport { createChatModelFromLlmConfig } from \"../langchain/index.js\";\nimport { resolveLlmSectionWithNpm } from \"../extensions/npm-protocol.js\";\nimport { loadModelsConfig } from \"../config/loader.js\";\n\n/**\n * Ensure bindTools always receives tool_choice: \"auto\" when tools are bound.\n * Fixes \"Tool choice is none, but model called a tool\" when using this model\n * with LangChain createAgent (AgentNode leaves tool_choice undefined for non-structured tools).\n * Mutates the model in place so it still passes isBaseChatModel / bindTools checks.\n */\nfunction applyDefaultToolChoice(model: BaseChatModel): void {\n const m = model as {\n bindTools?: (tools: unknown, opts?: Record<string, unknown>) => unknown;\n };\n const orig = m.bindTools?.bind(model);\n if (!orig) return;\n m.bindTools = function (tools: unknown, opts?: Record<string, unknown>) {\n return orig(tools, { ...opts, tool_choice: \"auto\" });\n };\n}\n\nconst CIS_DEFAULT_RESOLVE_HOST = \"s0010-ml-https.s0010.us-west-2.awswd\";\nconst CIS_DEFAULT_RESOLVE_IP = \"10.210.98.124\";\n\nfunction buildEndpointConnectivityOptions(\n config: LLMConfig & { baseURL: string }\n): EndpointConnectivityOptions | undefined {\n const opts = (config.options as Record<string, unknown> | undefined) ?? 
config;\n const provider = typeof config.provider === \"string\" ? config.provider : \"\";\n const baseURL = config.baseURL;\n const isCis = provider === \"cis\" || provider.includes(\"cis\");\n const useCisDefault =\n isCis &&\n baseURL.includes(CIS_DEFAULT_RESOLVE_HOST) &&\n opts?.resolveHost == null;\n\n const resolveHost =\n opts?.resolveHost != null && typeof (opts.resolveHost as { from?: string; to?: string }).from === \"string\"\n ? (opts.resolveHost as { from: string; to: string })\n : useCisDefault\n ? { from: CIS_DEFAULT_RESOLVE_HOST, to: CIS_DEFAULT_RESOLVE_IP }\n : undefined;\n const host = typeof opts?.host === \"string\" ? opts.host : (resolveHost ? resolveHost.from : undefined);\n if (resolveHost == null && host == null) return undefined;\n\n const verifySSL = opts?.verifySSL === true;\n const bypassAuth = opts?.bypassAuth !== false;\n\n return {\n resolveHost,\n host,\n verifySSL: resolveHost != null ? false : (verifySSL ? true : undefined),\n bypassAuth: bypassAuth ? true : undefined,\n featureKey: typeof opts?.featureKey === \"string\" ? 
opts.featureKey : undefined,\n };\n}\n\nexport interface CreateAgentLlMOptions {\n configPath?: string;\n installNpmIfMissing?: boolean;\n checkConnectivity?: boolean;\n onConnectionStatus?: (status: ConnectionStatus) => void;\n connectivityTimeoutMs?: number;\n}\n\nfunction resolveDefaultConfigPath(): string {\n return join(process.cwd(), \"models.yaml\");\n}\n\nfunction normalizeOptions(\n configPathOrOptions?: string | CreateAgentLlMOptions\n): CreateAgentLlMOptions {\n if (configPathOrOptions == null) return {};\n if (typeof configPathOrOptions === \"string\") return { configPath: configPathOrOptions };\n return configPathOrOptions;\n}\n\nfunction normalizeError(e: unknown, context: string): Error {\n if (e instanceof Error) return new Error(`${context}: ${e.message}`, { cause: e });\n return new Error(`${context}: ${String(e)}`);\n}\n\nasync function ensureConnectivity(\n resolvedLlmSection: unknown,\n options: {\n checkConnectivity?: boolean;\n onConnectionStatus?: (status: ConnectionStatus) => void;\n connectivityTimeoutMs?: number;\n }\n): Promise<void> {\n let configs: Array<LLMConfig & { baseURL: string }>;\n try {\n const parsed = parseLlmSection(resolvedLlmSection ?? null);\n configs = parsed.configs.filter(\n (c: LLMConfig): c is LLMConfig & { baseURL: string } =>\n typeof c.baseURL === \"string\" &&\n c.baseURL.length > 0 &&\n (c.baseURL.startsWith(\"http://\") || c.baseURL.startsWith(\"https://\")) &&\n !c.baseURL.includes(\"${\")\n );\n } catch {\n return;\n }\n const shouldCheck = options.checkConnectivity !== false && configs.length > 0;\n if (!shouldCheck) return;\n\n const report = (status: ConnectionStatus) => options.onConnectionStatus?.(status);\n const timeoutMs = options.connectivityTimeoutMs ?? 
8000;\n\n for (const config of configs) {\n const { id, baseURL } = config;\n report({\n phase: \"checking\",\n endpointId: id,\n baseURL,\n message: \"Checking connection...\",\n });\n\n const endpointOpts = buildEndpointConnectivityOptions(config);\n const result = await checkEndpointConnectivity(baseURL, {\n timeoutMs,\n ...endpointOpts,\n });\n\n if (result.reachable) {\n report({\n phase: \"reachable\",\n endpointId: id,\n baseURL,\n message: result.message ?? \"Connected\",\n });\n } else {\n report({\n phase: \"unreachable\",\n endpointId: id,\n baseURL,\n message: result.message ?? \"Unreachable\",\n });\n throw new Error(buildUnreachableError(id, baseURL, result.message));\n }\n }\n}\n\n/**\n * Create a LangChain ChatModel from models.yaml config.\n * Returns BaseChatModel compatible with LangChain's createAgent and other tools.\n */\nexport async function createAgentLlM(\n configPathOrOptions?: string | CreateAgentLlMOptions\n): Promise<BaseChatModel> {\n try {\n const options = normalizeOptions(configPathOrOptions);\n const configPath = options.configPath ?? resolveDefaultConfigPath();\n const modelsConfig = loadModelsConfig(configPath);\n\n if (modelsConfig?.llm == null) {\n throw new Error(\n `No LLM config at ${configPath}. Add models.yaml in the current directory, or pass configPath.`\n );\n }\n\n const resolvedSection = await resolveLlmSectionWithNpm(modelsConfig.llm, {\n installNpmIfMissing: options.installNpmIfMissing !== false,\n cwd: process.cwd(),\n });\n\n // Priority: caller option > YAML runtime > default (true)\n const checkConnectivity =\n options.checkConnectivity ?? 
modelsConfig.runtime.check_connectivity;\n\n await ensureConnectivity(resolvedSection, {\n checkConnectivity,\n onConnectionStatus: options.onConnectionStatus,\n connectivityTimeoutMs: options.connectivityTimeoutMs,\n });\n\n const model = createChatModelFromLlmConfig({ llmSection: resolvedSection });\n applyDefaultToolChoice(model);\n return model;\n } catch (e) {\n if (e instanceof Error && e.message.includes(\"No LLM config\")) throw e;\n if (e instanceof Error && e.message.includes(\"Cannot connect to\")) throw e;\n throw normalizeError(e, \"createAgentLlM failed\");\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAIA,SAAS,YAAY;AAoBrB,SAAS,uBAAuB,OAA4B;AAC1D,QAAM,IAAI;AAGV,QAAM,OAAO,EAAE,WAAW,KAAK,KAAK;AACpC,MAAI,CAAC,KAAM;AACX,IAAE,YAAY,SAAU,OAAgB,MAAgC;AACtE,WAAO,KAAK,OAAO,EAAE,GAAG,MAAM,aAAa,OAAO,CAAC;AAAA,EACrD;AACF;AAEA,IAAM,2BAA2B;AACjC,IAAM,yBAAyB;AAE/B,SAAS,iCACP,QACyC;AACzC,QAAM,OAAQ,OAAO,WAAmD;AACxE,QAAM,WAAW,OAAO,OAAO,aAAa,WAAW,OAAO,WAAW;AACzE,QAAM,UAAU,OAAO;AACvB,QAAM,QAAQ,aAAa,SAAS,SAAS,SAAS,KAAK;AAC3D,QAAM,gBACJ,SACA,QAAQ,SAAS,wBAAwB,KACzC,MAAM,eAAe;AAEvB,QAAM,cACJ,MAAM,eAAe,QAAQ,OAAQ,KAAK,YAA+C,SAAS,WAC7F,KAAK,cACN,gBACE,EAAE,MAAM,0BAA0B,IAAI,uBAAuB,IAC7D;AACR,QAAM,OAAO,OAAO,MAAM,SAAS,WAAW,KAAK,OAAQ,cAAc,YAAY,OAAO;AAC5F,MAAI,eAAe,QAAQ,QAAQ,KAAM,QAAO;AAEhD,QAAM,YAAY,MAAM,cAAc;AACtC,QAAM,aAAa,MAAM,eAAe;AAExC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,WAAW,eAAe,OAAO,QAAS,YAAY,OAAO;AAAA,IAC7D,YAAY,aAAa,OAAO;AAAA,IAChC,YAAY,OAAO,MAAM,eAAe,WAAW,KAAK,aAAa;AAAA,EACvE;AACF;AAUA,SAAS,2BAAmC;AAC1C,SAAO,KAAK,QAAQ,IAAI,GAAG,aAAa;AAC1C;AAEA,SAAS,iBACP,qBACuB;AACvB,MAAI,uBAAuB,KAAM,QAAO,CAAC;AACzC,MAAI,OAAO,wBAAwB,SAAU,QAAO,EAAE,YAAY,oBAAoB;AACtF,SAAO;AACT;AAEA,SAAS,eAAe,GAAY,SAAwB;AAC1D,MAAI,aAAa,MAAO,QAAO,IAAI,MAAM,GAAG,OAAO,KAAK,EAAE,OAAO,IAAI,EAAE,OAAO,EAAE,CAAC;AACjF,SAAO,IAAI,MAAM,GAAG,OAAO,KAAK,OAAO,CAAC,CAAC,EAAE;AAC7C;AAEA,eAAe,mBACb,oBACA,SAKe;AACf,MAAI;AACJ,MAAI;AACF,UAAM,SAAS,gBAAgB,sBAAsB,IAAI;AACzD,cAAU,OAAO,QAAQ;AAAA,MACvB,CAAC,MACC,OAAO,EAAE,YAAY,YACrB,EAAE,QA
AQ,SAAS,MAClB,EAAE,QAAQ,WAAW,SAAS,KAAK,EAAE,QAAQ,WAAW,UAAU,MACnE,CAAC,EAAE,QAAQ,SAAS,IAAI;AAAA,IAC5B;AAAA,EACF,QAAQ;AACN;AAAA,EACF;AACA,QAAM,cAAc,QAAQ,sBAAsB,SAAS,QAAQ,SAAS;AAC5E,MAAI,CAAC,YAAa;AAElB,QAAM,SAAS,CAAC,WAA6B,QAAQ,qBAAqB,MAAM;AAChF,QAAM,YAAY,QAAQ,yBAAyB;AAEnD,aAAW,UAAU,SAAS;AAC5B,UAAM,EAAE,IAAI,QAAQ,IAAI;AACxB,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,MACA,SAAS;AAAA,IACX,CAAC;AAED,UAAM,eAAe,iCAAiC,MAAM;AAC5D,UAAM,SAAS,MAAM,0BAA0B,SAAS;AAAA,MACtD;AAAA,MACA,GAAG;AAAA,IACL,CAAC;AAED,QAAI,OAAO,WAAW;AACpB,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,QACZ;AAAA,QACA,SAAS,OAAO,WAAW;AAAA,MAC7B,CAAC;AAAA,IACH,OAAO;AACL,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,QACZ;AAAA,QACA,SAAS,OAAO,WAAW;AAAA,MAC7B,CAAC;AACD,YAAM,IAAI,MAAM,sBAAsB,IAAI,SAAS,OAAO,OAAO,CAAC;AAAA,IACpE;AAAA,EACF;AACF;AAMA,eAAsB,eACpB,qBACwB;AACxB,MAAI;AACF,UAAM,UAAU,iBAAiB,mBAAmB;AACpD,UAAM,aAAa,QAAQ,cAAc,yBAAyB;AAClE,UAAM,eAAe,iBAAiB,UAAU;AAEhD,QAAI,cAAc,OAAO,MAAM;AAC7B,YAAM,IAAI;AAAA,QACR,oBAAoB,UAAU;AAAA,MAChC;AAAA,IACF;AAEA,UAAM,kBAAkB,MAAM,yBAAyB,aAAa,KAAK;AAAA,MACvE,qBAAqB,QAAQ,wBAAwB;AAAA,MACrD,KAAK,QAAQ,IAAI;AAAA,IACnB,CAAC;AAGD,UAAM,oBACJ,QAAQ,qBAAqB,aAAa,QAAQ;AAEpD,UAAM,mBAAmB,iBAAiB;AAAA,MACxC;AAAA,MACA,oBAAoB,QAAQ;AAAA,MAC5B,uBAAuB,QAAQ;AAAA,IACjC,CAAC;AAED,UAAM,QAAQ,6BAA6B,EAAE,YAAY,gBAAgB,CAAC;AAC1E,2BAAuB,KAAK;AAC5B,WAAO;AAAA,EACT,SAAS,GAAG;AACV,QAAI,aAAa,SAAS,EAAE,QAAQ,SAAS,eAAe,EAAG,OAAM;AACrE,QAAI,aAAa,SAAS,EAAE,QAAQ,SAAS,mBAAmB,EAAG,OAAM;AACzE,UAAM,eAAe,GAAG,uBAAuB;AAAA,EACjD;AACF;","names":[]}
@@ -0,0 +1,105 @@
1
+ // src/connectivity/check.ts
2
import http from "http";
import https from "https";
3
// Default probe timeout in milliseconds (8 s) when callers pass no timeoutMs.
var DEFAULT_TIMEOUT_MS = 8e3;
4
/**
 * Derive the health-probe URL for an OpenAI-compatible base URL: strip
 * trailing slashes, then append "/models" when the URL already carries a
 * "/v1" segment, otherwise "/v1/models".
 */
function probePath(baseURL) {
  const trimmed = baseURL.replace(/\/+$/, "");
  const suffix = trimmed.includes("/v1") ? "/models" : "/v1/models";
  return `${trimmed}${suffix}`;
}
9
/**
 * Build the probe request for an endpoint. Optionally rewrites the URL's
 * hostname (from -> to substitution) while remembering the original name so
 * it can be sent as the Host header, and optionally tags the query string
 * with bypass_auth=true.
 *
 * @returns {{ url: string, hostHeader: string|undefined }}
 */
function resolveProbeRequest(baseURL, endpointOptions) {
  const target = new URL(probePath(baseURL));
  const { from, to } = endpointOptions?.resolveHost ?? {};
  let hostHeader = endpointOptions?.host;
  if (from && to && target.hostname) {
    // Connect to the substituted address; keep the logical name for Host.
    target.hostname = target.hostname.replace(from, to);
    hostHeader ??= from;
  }
  const query = new URLSearchParams(target.search);
  if (endpointOptions?.bypassAuth === true) query.set("bypass_auth", "true");
  target.search = query.toString();
  return { url: target.toString(), hostHeader };
}
24
/**
 * Probe a URL with Node's http/https client. Always resolves (never
 * rejects) with a reachability result; any HTTP response counts as
 * reachable, with non-200 statuses surfaced in the message.
 *
 * @param {string} url - fully-resolved probe URL
 * @param {string|undefined} hostHeader - optional Host header override
 *   (set when the hostname was rewritten to an address)
 * @param {{ timeoutMs: number, verifySSL: boolean }} options
 * @returns {Promise<{ reachable: boolean, message: string, statusCode?: number }>}
 */
function checkWithHttps(url, hostHeader, options) {
  return new Promise((resolve) => {
    const u = new URL(url);
    // Bug fix: pick the transport from the URL scheme. The original always
    // used https.request, so plain-http probe URLs attempted a TLS
    // handshake against a plaintext server and always failed — even though
    // the port fallback below clearly anticipates http URLs.
    const transport = u.protocol === "https:" ? https : http;
    const reqOpts = {
      hostname: u.hostname,
      port: u.port || (u.protocol === "https:" ? 443 : 80),
      path: u.pathname + u.search,
      method: "GET",
      headers: { Accept: "application/json" },
      // Only meaningful on the https transport; ignored by http.
      rejectUnauthorized: options.verifySSL
    };
    if (hostHeader) reqOpts.headers = { ...reqOpts.headers, Host: hostHeader };
    const timeoutId = setTimeout(() => {
      // `req` is assigned before the timer can fire (timeoutMs > 0).
      req.destroy();
      resolve({ reachable: false, message: "Connection timed out" });
    }, options.timeoutMs);
    const req = transport.request(reqOpts, (res) => {
      clearTimeout(timeoutId);
      resolve({
        reachable: true,
        message: res.statusCode === 200 ? "OK" : `HTTP ${res.statusCode}`,
        statusCode: res.statusCode
      });
    });
    req.on("error", (err) => {
      clearTimeout(timeoutId);
      // Promise may already be settled by the timeout path; the extra
      // resolve is a harmless no-op.
      resolve({ reachable: false, message: err.message || "Connection failed" });
    });
    req.end();
  });
}
55
/**
 * Check whether an OpenAI-compatible endpoint responds at its probe URL.
 * Routes through the Node https client when endpoint-level options are in
 * play (host resolution, Host override, SSL verification disabled,
 * bypass_auth); otherwise uses a plain fetch with an abort-based timeout.
 * Always resolves with a { reachable, message, statusCode? } result.
 */
async function checkEndpointConnectivity(baseURL, options) {
  const timeoutMs = options?.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const needsCustomTransport =
    options?.resolveHost != null ||
    options?.verifySSL === false ||
    options?.host != null ||
    options?.bypassAuth === true;
  if (needsCustomTransport) {
    const { url, hostHeader } = resolveProbeRequest(baseURL, options);
    return checkWithHttps(url, hostHeader, {
      timeoutMs,
      verifySSL: options?.verifySSL === true
    });
  }
  const probeURL = probePath(baseURL);
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    const res = await fetch(probeURL, {
      method: "GET",
      signal: controller.signal,
      headers: { Accept: "application/json" }
    });
    // Any HTTP response counts as reachable; non-2xx is surfaced in message.
    return {
      reachable: true,
      message: res.ok ? "OK" : `HTTP ${res.status}`,
      statusCode: res.status
    };
  } catch (err) {
    const timedOut = err instanceof Error && err.name === "AbortError";
    const detail = err instanceof Error ? err.message : String(err);
    return {
      reachable: false,
      message: timedOut ? "Connection timed out" : detail || "Connection failed"
    };
  } finally {
    clearTimeout(timer);
  }
}
90
// User-facing hint appended to every unreachable-endpoint error.
var CIS_UNREACHABLE_REMINDER = "Please ensure you are connected to Secure VPN and try again.";
/**
 * Compose the error message for an endpoint that failed its connectivity
 * probe: endpoint identity, the probe's failure detail when provided, and
 * the VPN reminder.
 */
function buildUnreachableError(endpointId, baseURL, detail) {
  let message = `Cannot connect to CIS (endpoint: ${endpointId}, base URL: ${baseURL}).`;
  if (detail) message += ` ${detail}`;
  message += ` ${CIS_UNREACHABLE_REMINDER}`;
  return message.trim();
}
99
+
100
+ export {
101
+ checkEndpointConnectivity,
102
+ CIS_UNREACHABLE_REMINDER,
103
+ buildUnreachableError
104
+ };
105
+ //# sourceMappingURL=chunk-AUQEXHUP.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/connectivity/check.ts"],"sourcesContent":["/**\n * Check connectivity to OpenAI-compatible / CIS endpoints.\n */\nimport https from \"node:https\";\nimport type { CheckConnectivityResult } from \"./types.js\";\n\nconst DEFAULT_TIMEOUT_MS = 8000;\n\nexport interface EndpointConnectivityOptions {\n timeoutMs?: number;\n resolveHost?: { from: string; to: string };\n host?: string;\n verifySSL?: boolean;\n bypassAuth?: boolean;\n featureKey?: string;\n}\n\nfunction probePath(baseURL: string): string {\n const base = baseURL.replace(/\\/+$/, \"\");\n if (base.endsWith(\"/v1\")) return `${base}/models`;\n return base.includes(\"/v1\") ? `${base}/models` : `${base}/v1/models`;\n}\n\nfunction resolveProbeRequest(\n baseURL: string,\n endpointOptions?: EndpointConnectivityOptions | null\n): { url: string; hostHeader: string | undefined } {\n let path = probePath(baseURL);\n const urlObj = new URL(path);\n const resolveFrom = endpointOptions?.resolveHost?.from;\n const resolveTo = endpointOptions?.resolveHost?.to;\n let hostHeader = endpointOptions?.host;\n if (resolveFrom && resolveTo && urlObj.hostname) {\n urlObj.hostname = urlObj.hostname.replace(resolveFrom, resolveTo);\n hostHeader = hostHeader ?? resolveFrom;\n }\n const searchParams = new URLSearchParams(urlObj.search);\n if (endpointOptions?.bypassAuth === true) searchParams.set(\"bypass_auth\", \"true\");\n urlObj.search = searchParams.toString();\n return { url: urlObj.toString(), hostHeader };\n}\n\nfunction checkWithHttps(\n url: string,\n hostHeader: string | undefined,\n options: { timeoutMs: number; verifySSL: boolean }\n): Promise<CheckConnectivityResult> {\n return new Promise((resolve) => {\n const u = new URL(url);\n const reqOpts: https.RequestOptions = {\n hostname: u.hostname,\n port: u.port || (u.protocol === \"https:\" ? 
443 : 80),\n path: u.pathname + u.search,\n method: \"GET\",\n headers: { Accept: \"application/json\" },\n rejectUnauthorized: options.verifySSL,\n };\n if (hostHeader) reqOpts.headers = { ...reqOpts.headers, Host: hostHeader };\n\n const timeoutId = setTimeout(() => {\n req.destroy();\n resolve({ reachable: false, message: \"Connection timed out\" });\n }, options.timeoutMs);\n\n const req = https.request(reqOpts, (res) => {\n clearTimeout(timeoutId);\n resolve({\n reachable: true,\n message: res.statusCode === 200 ? \"OK\" : `HTTP ${res.statusCode}`,\n statusCode: res.statusCode,\n });\n });\n req.on(\"error\", (err) => {\n clearTimeout(timeoutId);\n resolve({ reachable: false, message: err.message || \"Connection failed\" });\n });\n req.end();\n });\n}\n\nexport async function checkEndpointConnectivity(\n baseURL: string,\n options?: { timeoutMs?: number } & EndpointConnectivityOptions\n): Promise<CheckConnectivityResult> {\n const timeoutMs = options?.timeoutMs ?? DEFAULT_TIMEOUT_MS;\n const useHttps =\n options?.resolveHost != null ||\n options?.verifySSL === false ||\n options?.host != null ||\n options?.bypassAuth === true;\n\n if (useHttps) {\n const { url, hostHeader } = resolveProbeRequest(baseURL, options);\n return checkWithHttps(url, hostHeader, {\n timeoutMs,\n verifySSL: options?.verifySSL === true,\n });\n }\n\n const url = probePath(baseURL);\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), timeoutMs);\n\n try {\n const res = await fetch(url, {\n method: \"GET\",\n signal: controller.signal,\n headers: { Accept: \"application/json\" },\n });\n clearTimeout(timeoutId);\n return {\n reachable: true,\n message: res.ok ? \"OK\" : `HTTP ${res.status}`,\n statusCode: res.status,\n };\n } catch (err) {\n clearTimeout(timeoutId);\n const message = err instanceof Error ? 
err.message : String(err);\n const isTimeout = err instanceof Error && err.name === \"AbortError\";\n return {\n reachable: false,\n message: isTimeout ? \"Connection timed out\" : message || \"Connection failed\",\n };\n }\n}\n\nexport const CIS_UNREACHABLE_REMINDER =\n \"Please ensure you are connected to Secure VPN and try again.\";\n\nexport function buildUnreachableError(\n endpointId: string,\n baseURL: string,\n detail?: string\n): string {\n const parts = [\n `Cannot connect to CIS (endpoint: ${endpointId}, base URL: ${baseURL}).`,\n detail && ` ${detail}`,\n ` ${CIS_UNREACHABLE_REMINDER}`,\n ];\n return parts.filter(Boolean).join(\"\").trim();\n}\n"],"mappings":";AAGA,OAAO,WAAW;AAGlB,IAAM,qBAAqB;AAW3B,SAAS,UAAU,SAAyB;AAC1C,QAAM,OAAO,QAAQ,QAAQ,QAAQ,EAAE;AACvC,MAAI,KAAK,SAAS,KAAK,EAAG,QAAO,GAAG,IAAI;AACxC,SAAO,KAAK,SAAS,KAAK,IAAI,GAAG,IAAI,YAAY,GAAG,IAAI;AAC1D;AAEA,SAAS,oBACP,SACA,iBACiD;AACjD,MAAI,OAAO,UAAU,OAAO;AAC5B,QAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,QAAM,cAAc,iBAAiB,aAAa;AAClD,QAAM,YAAY,iBAAiB,aAAa;AAChD,MAAI,aAAa,iBAAiB;AAClC,MAAI,eAAe,aAAa,OAAO,UAAU;AAC/C,WAAO,WAAW,OAAO,SAAS,QAAQ,aAAa,SAAS;AAChE,iBAAa,cAAc;AAAA,EAC7B;AACA,QAAM,eAAe,IAAI,gBAAgB,OAAO,MAAM;AACtD,MAAI,iBAAiB,eAAe,KAAM,cAAa,IAAI,eAAe,MAAM;AAChF,SAAO,SAAS,aAAa,SAAS;AACtC,SAAO,EAAE,KAAK,OAAO,SAAS,GAAG,WAAW;AAC9C;AAEA,SAAS,eACP,KACA,YACA,SACkC;AAClC,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,UAAM,IAAI,IAAI,IAAI,GAAG;AACrB,UAAM,UAAgC;AAAA,MACpC,UAAU,EAAE;AAAA,MACZ,MAAM,EAAE,SAAS,EAAE,aAAa,WAAW,MAAM;AAAA,MACjD,MAAM,EAAE,WAAW,EAAE;AAAA,MACrB,QAAQ;AAAA,MACR,SAAS,EAAE,QAAQ,mBAAmB;AAAA,MACtC,oBAAoB,QAAQ;AAAA,IAC9B;AACA,QAAI,WAAY,SAAQ,UAAU,EAAE,GAAG,QAAQ,SAAS,MAAM,WAAW;AAEzE,UAAM,YAAY,WAAW,MAAM;AACjC,UAAI,QAAQ;AACZ,cAAQ,EAAE,WAAW,OAAO,SAAS,uBAAuB,CAAC;AAAA,IAC/D,GAAG,QAAQ,SAAS;AAEpB,UAAM,MAAM,MAAM,QAAQ,SAAS,CAAC,QAAQ;AAC1C,mBAAa,SAAS;AACtB,cAAQ;AAAA,QACN,WAAW;AAAA,QACX,SAAS,IAAI,eAAe,MAAM,OAAO,QAAQ,IAAI,UAAU;AAAA,QAC/D,YAAY,IAAI;AAAA,MAClB,CAAC;AAAA,IACH,CAAC;AACD,QAAI,GAAG,SAAS,CAAC,QAAQ;AACvB,mBAAa,SAAS;AACtB,cAAQ,
EAAE,WAAW,OAAO,SAAS,IAAI,WAAW,oBAAoB,CAAC;AAAA,IAC3E,CAAC;AACD,QAAI,IAAI;AAAA,EACV,CAAC;AACH;AAEA,eAAsB,0BACpB,SACA,SACkC;AAClC,QAAM,YAAY,SAAS,aAAa;AACxC,QAAM,WACJ,SAAS,eAAe,QACxB,SAAS,cAAc,SACvB,SAAS,QAAQ,QACjB,SAAS,eAAe;AAE1B,MAAI,UAAU;AACZ,UAAM,EAAE,KAAAA,MAAK,WAAW,IAAI,oBAAoB,SAAS,OAAO;AAChE,WAAO,eAAeA,MAAK,YAAY;AAAA,MACrC;AAAA,MACA,WAAW,SAAS,cAAc;AAAA,IACpC,CAAC;AAAA,EACH;AAEA,QAAM,MAAM,UAAU,OAAO;AAC7B,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,SAAS;AAEhE,MAAI;AACF,UAAM,MAAM,MAAM,MAAM,KAAK;AAAA,MAC3B,QAAQ;AAAA,MACR,QAAQ,WAAW;AAAA,MACnB,SAAS,EAAE,QAAQ,mBAAmB;AAAA,IACxC,CAAC;AACD,iBAAa,SAAS;AACtB,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,IAAI,KAAK,OAAO,QAAQ,IAAI,MAAM;AAAA,MAC3C,YAAY,IAAI;AAAA,IAClB;AAAA,EACF,SAAS,KAAK;AACZ,iBAAa,SAAS;AACtB,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,UAAM,YAAY,eAAe,SAAS,IAAI,SAAS;AACvD,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,YAAY,yBAAyB,WAAW;AAAA,IAC3D;AAAA,EACF;AACF;AAEO,IAAM,2BACX;AAEK,SAAS,sBACd,YACA,SACA,QACQ;AACR,QAAM,QAAQ;AAAA,IACZ,oCAAoC,UAAU,eAAe,OAAO;AAAA,IACpE,UAAU,IAAI,MAAM;AAAA,IACpB,IAAI,wBAAwB;AAAA,EAC9B;AACA,SAAO,MAAM,OAAO,OAAO,EAAE,KAAK,EAAE,EAAE,KAAK;AAC7C;","names":["url"]}