mcard-js 2.1.49 → 2.1.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/CardCollection-EMSBVZP3.js +10 -0
- package/dist/CardCollection-KQWR4PCV.js +10 -0
- package/dist/CardCollection-ORGE2XBG.js +10 -0
- package/dist/EngineRegistry-ABZXHZWO.js +17 -0
- package/dist/EngineRegistry-EIOT4MUZ.js +17 -0
- package/dist/EngineRegistry-IQ6EVO72.js +17 -0
- package/dist/EngineRegistry-PHRFXEOE.js +17 -0
- package/dist/IndexedDBEngine-EWA3SLAO.js +12 -0
- package/dist/IndexedDBEngine-FXAD42F3.js +12 -0
- package/dist/IndexedDBEngine-RD4447IS.js +12 -0
- package/dist/LLMRuntime-ARUWOX52.js +17 -0
- package/dist/LLMRuntime-C3XCO7WF.js +17 -0
- package/dist/LLMRuntime-CQ7X43QR.js +17 -0
- package/dist/LLMRuntime-PD45COKE.js +17 -0
- package/dist/LLMRuntime-QOUMLT33.js +17 -0
- package/dist/LLMRuntime-SZNLTHD7.js +17 -0
- package/dist/LLMRuntime-TVJGK2BG.js +17 -0
- package/dist/LambdaRuntime-25GMEJCU.js +19 -0
- package/dist/LambdaRuntime-7KQUMHPI.js +19 -0
- package/dist/LambdaRuntime-DRT7ODPC.js +19 -0
- package/dist/LambdaRuntime-HSREEYQG.js +19 -0
- package/dist/LambdaRuntime-IH7NVG6Z.js +19 -0
- package/dist/LambdaRuntime-MPG27FM2.js +19 -0
- package/dist/LambdaRuntime-ODSWIMNM.js +19 -0
- package/dist/LambdaRuntime-PHGRZYAW.js +19 -0
- package/dist/LambdaRuntime-QOEYR37L.js +19 -0
- package/dist/LambdaRuntime-RT33TFN2.js +19 -0
- package/dist/LambdaRuntime-W6TQBP5O.js +19 -0
- package/dist/Loader-35WSUC53.js +14 -0
- package/dist/Loader-STS3G4OQ.js +16 -0
- package/dist/Loader-W22AEM6F.js +12 -0
- package/dist/Loader-YBPWP43S.js +12 -0
- package/dist/Loader-ZYSS7B4D.js +12 -0
- package/dist/NetworkRuntime-KR2QITXV.js +987 -0
- package/dist/NetworkRuntime-S6V2CMZV.js +1575 -0
- package/dist/OllamaProvider-2ANW6EB2.js +9 -0
- package/dist/OllamaProvider-5QFJKYAC.js +9 -0
- package/dist/OllamaProvider-6QXJGR7V.js +9 -0
- package/dist/OllamaProvider-ABEEFX7M.js +9 -0
- package/dist/OllamaProvider-Z2CGY5LY.js +9 -0
- package/dist/VCard-225X42W7.js +25 -0
- package/dist/chunk-2APJYBH4.js +368 -0
- package/dist/chunk-4DFTWDRB.js +497 -0
- package/dist/chunk-4PBRTFSY.js +112 -0
- package/dist/chunk-4T3H25AP.js +299 -0
- package/dist/chunk-5DFXPIRL.js +42 -0
- package/dist/chunk-5HRZV4R3.js +217 -0
- package/dist/chunk-6ZRJXVJ3.js +529 -0
- package/dist/chunk-7N7JYGN2.js +364 -0
- package/dist/chunk-7QTJUGYQ.js +74 -0
- package/dist/chunk-7TXIPJI2.js +2360 -0
- package/dist/chunk-BFJUD527.js +2369 -0
- package/dist/chunk-CHXIVTQV.js +364 -0
- package/dist/chunk-DM2ABCA4.js +497 -0
- package/dist/chunk-DTPHGTBQ.js +275 -0
- package/dist/chunk-EDAJ5FO6.js +405 -0
- package/dist/chunk-ETJWXHKZ.js +246 -0
- package/dist/chunk-FLYGNPUC.js +2369 -0
- package/dist/chunk-FSDRDWOP.js +34 -0
- package/dist/chunk-GIKMCG4D.js +497 -0
- package/dist/chunk-IJKS3LGK.js +428 -0
- package/dist/chunk-JUQ2VQZA.js +428 -0
- package/dist/chunk-JVW4J7BY.js +2369 -0
- package/dist/chunk-JWTRVEC3.js +2369 -0
- package/dist/chunk-KJM4C65U.js +299 -0
- package/dist/chunk-KMC566CN.js +591 -0
- package/dist/chunk-KMNP6DBL.js +455 -0
- package/dist/chunk-LVU7O5IY.js +597 -0
- package/dist/chunk-M4C6RWLA.js +373 -0
- package/dist/chunk-NAAAKSEO.js +541 -0
- package/dist/chunk-NKIXLPHL.js +373 -0
- package/dist/chunk-NOEDMK7I.js +428 -0
- package/dist/chunk-NOPYSBOQ.js +2360 -0
- package/dist/chunk-P4G42QCY.js +2369 -0
- package/dist/chunk-PKLONZCF.js +253 -0
- package/dist/chunk-PNGECWPN.js +597 -0
- package/dist/chunk-PYP6T64W.js +217 -0
- package/dist/chunk-QFT3COE2.js +217 -0
- package/dist/chunk-QFZFXMNX.js +275 -0
- package/dist/chunk-QZGRQRJP.js +2369 -0
- package/dist/chunk-R3XRBAM7.js +253 -0
- package/dist/chunk-RYP66UMH.js +74 -0
- package/dist/chunk-RZIZYRLF.js +112 -0
- package/dist/chunk-T43V44RS.js +2369 -0
- package/dist/chunk-UCNVX5BZ.js +74 -0
- package/dist/chunk-UDF7HS4V.js +368 -0
- package/dist/chunk-VJPXJVEH.js +299 -0
- package/dist/chunk-VW3KBDK5.js +74 -0
- package/dist/chunk-X72XIYSN.js +364 -0
- package/dist/chunk-XETU7TV4.js +112 -0
- package/dist/chunk-Y4BT6LHA.js +368 -0
- package/dist/chunk-YQGB6BIA.js +2369 -0
- package/dist/chunk-ZEQPO3XV.js +217 -0
- package/dist/chunk-ZKRKWXEQ.js +2369 -0
- package/dist/chunk-ZMK2HTZ5.js +275 -0
- package/dist/constants-CLB7B6MN.js +101 -0
- package/dist/constants-O343SMHL.js +103 -0
- package/dist/constants-YPGDEX5X.js +103 -0
- package/dist/index.browser.cjs +11 -5
- package/dist/index.browser.js +12 -12
- package/dist/index.cjs +2358 -1896
- package/dist/index.d.cts +934 -776
- package/dist/index.d.ts +934 -776
- package/dist/index.js +1353 -1271
- package/dist/storage/SqliteNodeEngine.cjs +12 -6
- package/dist/storage/SqliteNodeEngine.js +4 -4
- package/dist/storage/SqliteWasmEngine.cjs +11 -5
- package/dist/storage/SqliteWasmEngine.js +4 -4
- package/package.json +5 -3
|
@@ -0,0 +1,428 @@
|
|
|
1
|
+
import {
|
|
2
|
+
BaseLLMProvider,
|
|
3
|
+
DEFAULT_PROVIDER,
|
|
4
|
+
LLMConfig,
|
|
5
|
+
LLM_PROVIDERS,
|
|
6
|
+
OllamaProvider
|
|
7
|
+
} from "./chunk-UDF7HS4V.js";
|
|
8
|
+
import {
|
|
9
|
+
IO
|
|
10
|
+
} from "./chunk-MPMRBT5R.js";
|
|
11
|
+
import {
|
|
12
|
+
Either
|
|
13
|
+
} from "./chunk-2KADE3SE.js";
|
|
14
|
+
import {
|
|
15
|
+
LLM_DEFAULT_TIMEOUT_SECS
|
|
16
|
+
} from "./chunk-ETJWXHKZ.js";
|
|
17
|
+
|
|
18
|
+
// src/ptr/llm/providers/WebLLMProvider.ts
|
|
19
|
+
// Narrow an unknown parameter value to a number.
// Returns the value itself when it is a number, otherwise undefined,
// so absent/ill-typed sampling params are simply omitted downstream.
function asOptionalNumber(value) {
  if (typeof value === "number") {
    return value;
  }
  return undefined;
}
|
|
22
|
+
// Browser-only LLM provider backed by @mlc-ai/web-llm.
// The MLC engine is created lazily on first use and cached; switching models
// reloads the existing engine instead of creating a new one.
var WebLLMProvider = class extends BaseLLMProvider {
  provider_name = "webllm";
  config;                         // provider config taken from LLM_PROVIDERS["webllm"]
  engine = null;                  // cached MLC engine instance (null until first init)
  current_model = null;           // model id currently loaded into `engine`
  initialization_promise = null;  // in-flight init, shared so concurrent callers wait on one load
  constructor() {
    super();
    this.config = LLM_PROVIDERS["webllm"];
  }
  // Return Either.right(engine) for `model_id`, creating or reloading the
  // engine as needed; Either.left(message) on failure.
  // NOTE(review): on success initialization_promise is left non-null, so a later
  // call for a different model awaits the stale settled promise before starting
  // a new init — harmless but worth confirming intent.
  async _get_engine(model_id) {
    // Fast path: engine already loaded with the requested model.
    if (this.engine && this.current_model === model_id) {
      return Either.right(this.engine);
    }
    // If another caller is initializing, wait for it — it may have loaded
    // exactly the model we need.
    if (this.initialization_promise) {
      await this.initialization_promise;
      if (this.engine && this.current_model === model_id) {
        return Either.right(this.engine);
      }
    }
    // Start a fresh init/reload; the IIFE lets concurrent callers share it.
    this.initialization_promise = (async () => {
      try {
        if (typeof window === "undefined") {
          throw new Error("WebLLM only supports browser environments.");
        }
        const windowWithWebLLM = window;
        // Prefer a script-tag global; fall back to a dynamic import.
        let webllm = windowWithWebLLM.webllm;
        if (!webllm) {
          try {
            webllm = await import("@mlc-ai/web-llm");
          } catch (e) {
            // Import failure is non-fatal here; handled by the check below.
            void e;
          }
        }
        if (!webllm) {
          throw new Error("WebLLM library not found. Please include @mlc-ai/web-llm or add script tag.");
        }
        if (!this.engine) {
          // First load: create the engine for the requested model.
          this.engine = await webllm.CreateMLCEngine(model_id, {
            initProgressCallback: (report) => {
              console.debug(`[WebLLM] ${report.text}`);
            }
          });
        } else {
          // Engine exists but holds a different model: hot-swap it.
          await this.engine.reload(model_id);
        }
        this.current_model = model_id;
      } catch (e) {
        // Reset cached state so the next call retries from scratch.
        this.engine = null;
        this.current_model = null;
        throw e;
      }
    })();
    try {
      await this.initialization_promise;
      return Either.right(this.engine);
    } catch (e) {
      // Clear the failed promise so future calls can retry.
      this.initialization_promise = null;
      const error = e;
      return Either.left(`WebLLM init failed: ${error.message || String(e)}`);
    }
  }
  // Single-prompt completion via the engine's OpenAI-style chat API.
  // `images` is accepted for interface parity but not used here.
  // Returns Either.right(text) or Either.left(message).
  async complete(prompt, params, images) {
    const model = typeof params.model === "string" ? params.model : this.config.default_model;
    const engineResult = await this._get_engine(model);
    if (engineResult.isLeft) return Either.left(engineResult.left);
    const engine = engineResult.right;
    try {
      const completion = await engine.chat.completions.create({
        messages: [{ role: "user", content: prompt }],
        temperature: asOptionalNumber(params.temperature),
        max_tokens: asOptionalNumber(params.max_tokens),
        top_p: asOptionalNumber(params.top_p),
        stream: false
      });
      const content = completion.choices?.[0]?.message?.content || "";
      return Either.right(content);
    } catch (e) {
      const error = e;
      return Either.left(`WebLLM completion error: ${error.message || String(e)}`);
    }
  }
  // Multi-message chat. Returns Either.right({content, role, model, usage})
  // or Either.left(message).
  async chat(messages, params) {
    const model = typeof params.model === "string" ? params.model : this.config.default_model;
    const engineResult = await this._get_engine(model);
    if (engineResult.isLeft) return Either.left(engineResult.left);
    const engine = engineResult.right;
    try {
      const completion = await engine.chat.completions.create({
        messages,
        temperature: asOptionalNumber(params.temperature),
        max_tokens: asOptionalNumber(params.max_tokens),
        top_p: asOptionalNumber(params.top_p),
        stream: false
      });
      const choice = completion.choices?.[0];
      return Either.right({
        content: choice?.message?.content || "",
        role: choice?.message?.role || "assistant",
        model,
        usage: completion.usage
      });
    } catch (e) {
      const error = e;
      return Either.left(`WebLLM chat error: ${error.message || String(e)}`);
    }
  }
  // True when running in a browser and the webllm library is resolvable
  // (global or importable); does not load a model.
  async validate_connection() {
    if (typeof window === "undefined") return false;
    if (window.webllm) return true;
    try {
      await import("@mlc-ai/web-llm");
      return true;
    } catch {
      return false;
    }
  }
  // Static model list from the provider config (no network call).
  async list_models() {
    return Either.right(this.config.available_models);
  }
};
|
|
143
|
+
|
|
144
|
+
// src/ptr/llm/providers/MLCLLMProvider.ts
|
|
145
|
+
import * as http from "http";
|
|
146
|
+
import * as https from "https";
|
|
147
|
+
// Provider for an MLC-LLM server exposing an OpenAI-compatible HTTP API.
// Uses global fetch when available (browser / modern Node), otherwise falls
// back to Node's http/https modules.
var MLCLLMProvider = class extends BaseLLMProvider {
  provider_name = "mlc-llm";
  base_url;   // server base URL, trailing slash stripped
  timeout;    // request timeout in milliseconds
  config;     // provider config from LLM_PROVIDERS["mlc-llm"]
  // `base_url` falls back to the config default; `timeout` is given in
  // seconds and converted to ms.
  constructor(base_url = null, timeout = LLM_DEFAULT_TIMEOUT_SECS) {
    super();
    this.config = LLM_PROVIDERS["mlc-llm"];
    this.base_url = (base_url || this.config.base_url).replace(/\/$/, "");
    this.timeout = timeout * 1e3;
  }
  // POST/GET `endpoint` and parse the JSON body.
  // Returns Either.right(parsed) or Either.left(error message).
  async _fetch_json(endpoint, options) {
    if (typeof globalThis.fetch === "function") {
      try {
        // Abort the request if it exceeds the configured timeout.
        const controller = new AbortController();
        const id = setTimeout(() => controller.abort(), this.timeout);
        const response = await fetch(`${this.base_url}${endpoint}`, {
          ...options,
          signal: controller.signal
        });
        clearTimeout(id);
        if (!response.ok) {
          return Either.left(`HTTP error ${response.status}: ${await response.text()}`);
        }
        const data = await response.json();
        return Either.right(data);
      } catch (e) {
        const error = e;
        return Either.left(`Connection error: ${error.message || String(e)}`);
      }
    }
    // No fetch available: use the raw Node http/https fallback.
    return this._node_request(endpoint, options);
  }
  // Node fallback transport. Always resolves (never rejects) with an Either,
  // mirroring _fetch_json's contract.
  _node_request(endpoint, options) {
    const urlStr = `${this.base_url}${endpoint}`;
    const url = new URL(urlStr);
    const isHttps = url.protocol === "https:";
    const client = isHttps ? https : http;
    const reqOptions = {
      method: options.method || "GET",
      headers: options.headers || {},
      timeout: this.timeout
    };
    return new Promise((resolve) => {
      const req = client.request(url, reqOptions, (res) => {
        let body = "";
        res.on("data", (chunk) => body += chunk);
        res.on("end", () => {
          if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300) {
            try {
              resolve(Either.right(JSON.parse(body)));
            } catch (e) {
              resolve(Either.left(`Parse error: ${e}`));
            }
          } else {
            resolve(Either.left(`HTTP Error ${res.statusCode}: ${body}`));
          }
        });
      });
      req.on("error", (e) => resolve(Either.left(e.message)));
      req.on("timeout", () => {
        // Destroy the socket so the 'end' handler never fires after timeout.
        req.destroy();
        resolve(Either.left("Request timed out"));
      });
      if (options.body) {
        req.write(options.body);
      }
      req.end();
    });
  }
  // Single-prompt completion. The prompt is sent as a one-message chat to
  // the config's api_path; the response is read from choices[0].text.
  // `images` is accepted for interface parity but not used here.
  async complete(prompt, params, images) {
    const data = {
      model: typeof params.model === "string" ? params.model : this.config.default_model,
      messages: [{ role: "user", content: prompt }],
      max_tokens: params.max_tokens,
      temperature: params.temperature,
      top_p: params.top_p,
      stream: false
    };
    const result = await this._fetch_json(this.config.api_path, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(data)
    });
    if (result.isLeft) return Either.left(result.left);
    const response = result.right;
    if (response.choices && response.choices.length > 0) {
      return Either.right(response.choices[0].text || "");
    }
    return Either.left(`Unexpected response format: ${JSON.stringify(response)}`);
  }
  // Multi-message chat against the config's chat_path.
  // Returns Either.right({content, role, model, usage}) or Either.left(message).
  async chat(messages, params) {
    const data = {
      model: params.model || this.config.default_model,
      messages,
      max_tokens: params.max_tokens,
      temperature: params.temperature,
      top_p: params.top_p,
      stream: false
    };
    const result = await this._fetch_json(this.config.chat_path, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(data)
    });
    if (result.isLeft) return Either.left(result.left);
    const response = result.right;
    if (response.choices && response.choices.length > 0) {
      const message = response.choices[0].message;
      return Either.right({
        content: message?.content || "",
        role: message?.role || "assistant",
        model: response.model,
        usage: response.usage
      });
    }
    return Either.left(`Unexpected response format: ${JSON.stringify(response)}`);
  }
  // True when the models endpoint answers successfully.
  async validate_connection() {
    const result = await this._fetch_json(this.config.models_path, { method: "GET" });
    return result.isRight;
  }
  // List model ids from the server's models endpoint (OpenAI-style
  // {data: [{id}, ...]} shape). Non-string ids are filtered out.
  async list_models() {
    const result = await this._fetch_json(this.config.models_path, { method: "GET" });
    if (result.isLeft) return Either.left(result.left);
    const response = result.right;
    if (response.data && Array.isArray(response.data)) {
      return Either.right(response.data.map((m) => m.id).filter((id) => typeof id === "string"));
    }
    return Either.left("Invalid models response");
  }
};
|
|
279
|
+
|
|
280
|
+
// src/ptr/llm/LLMRuntime.ts
|
|
281
|
+
// Factory mapping a provider name to a concrete provider instance.
// `base_url` and `timeout` are forwarded to providers that take them;
// WebLLM is in-browser and needs neither. Throws on unknown names.
function get_provider(provider_name = DEFAULT_PROVIDER, base_url = null, timeout = LLM_DEFAULT_TIMEOUT_SECS) {
  switch (provider_name) {
    case "ollama":
      return new OllamaProvider(base_url, timeout);
    case "webllm":
      return new WebLLMProvider();
    case "mlc-llm":
      return new MLCLLMProvider(base_url, timeout);
    default:
      throw new Error(`Unknown provider: ${provider_name}`);
  }
}
|
|
293
|
+
// Runtime that routes prompts/contexts to the configured LLM provider and
// formats the response per the resolved LLMConfig.
var LLMRuntime = class {
  provider_name;     // current provider name; may be switched by execute()
  _provider = null;  // lazily created provider instance
  constructor(provider_name = DEFAULT_PROVIDER) {
    this.provider_name = provider_name;
  }
  // Lazily instantiate the provider for provider_name on first access.
  get provider() {
    if (!this._provider) {
      this._provider = get_provider(this.provider_name);
    }
    return this._provider;
  }
  // Run one LLM call. `context` may be a plain prompt string or an object
  // ({prompt, images?} or arbitrary data, which is JSON-stringified).
  // `codeOrPath` and `chapterDir` are part of the runtime interface but are
  // not used by this implementation.
  // Returns the formatted response, or an "Error: ..." string on failure.
  async execute(codeOrPath, context, config, chapterDir) {
    // Object contexts also feed config resolution (template variables etc.
    // — presumably; see LLMConfig.from_concrete).
    let configCtx = {};
    if (typeof context === "object" && context !== null) {
      configCtx = context;
    }
    const concrete = config;
    const llmConfig = LLMConfig.from_concrete(concrete, configCtx);
    // Rebuild the provider when the config names a different provider or
    // pins an explicit endpoint URL.
    if (llmConfig.provider !== this.provider_name || llmConfig.endpoint_url) {
      this.provider_name = llmConfig.provider;
      this._provider = get_provider(llmConfig.provider, llmConfig.endpoint_url, llmConfig.timeout);
    }
    // Extract prompt (and optional images) from the context.
    let prompt = "";
    let images;
    if (typeof context === "string") {
      prompt = context;
    } else if (context && typeof context === "object") {
      const ctx = context;
      if (typeof ctx.prompt === "string") {
        prompt = ctx.prompt;
        if (Array.isArray(ctx.images)) {
          images = ctx.images;
        }
      } else {
        // No explicit prompt field: serialize the whole context.
        prompt = JSON.stringify(context);
      }
    }
    // A system prompt forces the chat path; otherwise plain completion.
    let result;
    if (llmConfig.system_prompt) {
      result = await this._execute_chat(prompt, llmConfig, images);
    } else {
      result = await this._execute_completion(prompt, llmConfig, images);
    }
    if (result.isLeft) {
      return `Error: ${result.left}`;
    }
    return this._format_response(result.right, llmConfig);
  }
  // Completion path: forward prompt + provider params (+ optional images).
  async _execute_completion(prompt, config, images) {
    const params = config.to_provider_params();
    return this.provider.complete(prompt, params, images);
  }
  // Chat path: assemble system/user/assistant messages from the config,
  // attaching images to the user message when present.
  async _execute_chat(prompt, config, images) {
    const messages = [];
    if (config.system_prompt) {
      messages.push({ role: "system", content: config.system_prompt });
    }
    const userMsg = { role: "user", content: prompt };
    if (images && images.length > 0) {
      userMsg.images = images;
    }
    messages.push(userMsg);
    if (config.assistant_instruction) {
      messages.push({ role: "assistant", content: config.assistant_instruction });
    }
    const params = config.to_provider_params();
    return this.provider.chat(messages, params);
  }
  // Normalize a provider response. Chat responses are unwrapped to their
  // `content`; when the config asks for JSON, the first {...} span in a
  // string response is parsed (falling back to the raw content on failure).
  _format_response(response, config) {
    let content = response;
    if (response && typeof response === "object" && "content" in response) {
      content = response.content;
    }
    if (config.response_format === "json") {
      try {
        if (typeof content === "string") {
          // Best-effort JSON extraction: take the outermost brace span.
          const start = content.indexOf("{");
          const end = content.lastIndexOf("}") + 1;
          if (start >= 0 && end > start) {
            return JSON.parse(content.substring(start, end));
          }
        }
        return content;
      } catch (e) {
        // Unparsable JSON: return the raw content rather than failing.
        return content;
      }
    }
    return content;
  }
};
|
|
384
|
+
// Wrap a one-shot completion request in a lazy IO action.
// Nothing runs until the IO is executed; the thunk resolves to the
// provider's Either, or Either.left on setup failure.
function promptMonad(prompt, config = {}) {
  const run = async () => {
    try {
      const cfg = new LLMConfig(config);
      const rt = new LLMRuntime(cfg.provider);
      return rt.provider.complete(prompt, cfg.to_provider_params());
    } catch (e) {
      return Either.left(`LLM execution failed: ${e}`);
    }
  };
  return IO.of(run);
}
|
|
396
|
+
// Wrap a chat request in a lazy IO action.
// When no non-empty message list is supplied, a conversation is assembled
// from system_prompt / prompt / the config's assistant_instruction.
// The thunk resolves to the provider's Either, or Either.left on setup failure.
function chatMonad(messages = null, prompt = null, system_prompt = "", config = {}) {
  const run = async () => {
    try {
      // system_prompt argument overrides any system_prompt in config.
      const merged = { ...config };
      if (system_prompt) {
        merged.system_prompt = system_prompt;
      }
      const cfg = new LLMConfig(merged);
      const rt = new LLMRuntime(cfg.provider);
      // Copy caller messages so we never mutate their array.
      const conversation = messages ? [...messages] : [];
      if (conversation.length === 0) {
        if (cfg.system_prompt) {
          conversation.push({ role: "system", content: cfg.system_prompt });
        }
        if (prompt) {
          conversation.push({ role: "user", content: prompt });
        }
        if (cfg.assistant_instruction) {
          conversation.push({ role: "assistant", content: cfg.assistant_instruction });
        }
      }
      return rt.provider.chat(conversation, cfg.to_provider_params());
    } catch (e) {
      return Either.left(`LLM chat failed: ${e}`);
    }
  };
  return IO.of(run);
}
|
|
422
|
+
|
|
423
|
+
export {
|
|
424
|
+
get_provider,
|
|
425
|
+
LLMRuntime,
|
|
426
|
+
promptMonad,
|
|
427
|
+
chatMonad
|
|
428
|
+
};
|