mcard-js 2.1.49 → 2.1.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/CardCollection-EMSBVZP3.js +10 -0
- package/dist/CardCollection-KQWR4PCV.js +10 -0
- package/dist/CardCollection-ORGE2XBG.js +10 -0
- package/dist/EngineRegistry-ABZXHZWO.js +17 -0
- package/dist/EngineRegistry-EIOT4MUZ.js +17 -0
- package/dist/EngineRegistry-IQ6EVO72.js +17 -0
- package/dist/EngineRegistry-PHRFXEOE.js +17 -0
- package/dist/IndexedDBEngine-EWA3SLAO.js +12 -0
- package/dist/IndexedDBEngine-FXAD42F3.js +12 -0
- package/dist/IndexedDBEngine-RD4447IS.js +12 -0
- package/dist/LLMRuntime-ARUWOX52.js +17 -0
- package/dist/LLMRuntime-C3XCO7WF.js +17 -0
- package/dist/LLMRuntime-CQ7X43QR.js +17 -0
- package/dist/LLMRuntime-PD45COKE.js +17 -0
- package/dist/LLMRuntime-QOUMLT33.js +17 -0
- package/dist/LLMRuntime-SZNLTHD7.js +17 -0
- package/dist/LLMRuntime-TVJGK2BG.js +17 -0
- package/dist/LambdaRuntime-25GMEJCU.js +19 -0
- package/dist/LambdaRuntime-7KQUMHPI.js +19 -0
- package/dist/LambdaRuntime-DRT7ODPC.js +19 -0
- package/dist/LambdaRuntime-HSREEYQG.js +19 -0
- package/dist/LambdaRuntime-IH7NVG6Z.js +19 -0
- package/dist/LambdaRuntime-MPG27FM2.js +19 -0
- package/dist/LambdaRuntime-ODSWIMNM.js +19 -0
- package/dist/LambdaRuntime-PHGRZYAW.js +19 -0
- package/dist/LambdaRuntime-QOEYR37L.js +19 -0
- package/dist/LambdaRuntime-RT33TFN2.js +19 -0
- package/dist/LambdaRuntime-W6TQBP5O.js +19 -0
- package/dist/Loader-35WSUC53.js +14 -0
- package/dist/Loader-STS3G4OQ.js +16 -0
- package/dist/Loader-W22AEM6F.js +12 -0
- package/dist/Loader-YBPWP43S.js +12 -0
- package/dist/Loader-ZYSS7B4D.js +12 -0
- package/dist/NetworkRuntime-KR2QITXV.js +987 -0
- package/dist/NetworkRuntime-S6V2CMZV.js +1575 -0
- package/dist/OllamaProvider-2ANW6EB2.js +9 -0
- package/dist/OllamaProvider-5QFJKYAC.js +9 -0
- package/dist/OllamaProvider-6QXJGR7V.js +9 -0
- package/dist/OllamaProvider-ABEEFX7M.js +9 -0
- package/dist/OllamaProvider-Z2CGY5LY.js +9 -0
- package/dist/VCard-225X42W7.js +25 -0
- package/dist/chunk-2APJYBH4.js +368 -0
- package/dist/chunk-4DFTWDRB.js +497 -0
- package/dist/chunk-4PBRTFSY.js +112 -0
- package/dist/chunk-4T3H25AP.js +299 -0
- package/dist/chunk-5DFXPIRL.js +42 -0
- package/dist/chunk-5HRZV4R3.js +217 -0
- package/dist/chunk-6ZRJXVJ3.js +529 -0
- package/dist/chunk-7N7JYGN2.js +364 -0
- package/dist/chunk-7QTJUGYQ.js +74 -0
- package/dist/chunk-7TXIPJI2.js +2360 -0
- package/dist/chunk-BFJUD527.js +2369 -0
- package/dist/chunk-CHXIVTQV.js +364 -0
- package/dist/chunk-DM2ABCA4.js +497 -0
- package/dist/chunk-DTPHGTBQ.js +275 -0
- package/dist/chunk-EDAJ5FO6.js +405 -0
- package/dist/chunk-ETJWXHKZ.js +246 -0
- package/dist/chunk-FLYGNPUC.js +2369 -0
- package/dist/chunk-FSDRDWOP.js +34 -0
- package/dist/chunk-GIKMCG4D.js +497 -0
- package/dist/chunk-IJKS3LGK.js +428 -0
- package/dist/chunk-JUQ2VQZA.js +428 -0
- package/dist/chunk-JVW4J7BY.js +2369 -0
- package/dist/chunk-JWTRVEC3.js +2369 -0
- package/dist/chunk-KJM4C65U.js +299 -0
- package/dist/chunk-KMC566CN.js +591 -0
- package/dist/chunk-KMNP6DBL.js +455 -0
- package/dist/chunk-LVU7O5IY.js +597 -0
- package/dist/chunk-M4C6RWLA.js +373 -0
- package/dist/chunk-NAAAKSEO.js +541 -0
- package/dist/chunk-NKIXLPHL.js +373 -0
- package/dist/chunk-NOEDMK7I.js +428 -0
- package/dist/chunk-NOPYSBOQ.js +2360 -0
- package/dist/chunk-P4G42QCY.js +2369 -0
- package/dist/chunk-PKLONZCF.js +253 -0
- package/dist/chunk-PNGECWPN.js +597 -0
- package/dist/chunk-PYP6T64W.js +217 -0
- package/dist/chunk-QFT3COE2.js +217 -0
- package/dist/chunk-QFZFXMNX.js +275 -0
- package/dist/chunk-QZGRQRJP.js +2369 -0
- package/dist/chunk-R3XRBAM7.js +253 -0
- package/dist/chunk-RYP66UMH.js +74 -0
- package/dist/chunk-RZIZYRLF.js +112 -0
- package/dist/chunk-T43V44RS.js +2369 -0
- package/dist/chunk-UCNVX5BZ.js +74 -0
- package/dist/chunk-UDF7HS4V.js +368 -0
- package/dist/chunk-VJPXJVEH.js +299 -0
- package/dist/chunk-VW3KBDK5.js +74 -0
- package/dist/chunk-X72XIYSN.js +364 -0
- package/dist/chunk-XETU7TV4.js +112 -0
- package/dist/chunk-Y4BT6LHA.js +368 -0
- package/dist/chunk-YQGB6BIA.js +2369 -0
- package/dist/chunk-ZEQPO3XV.js +217 -0
- package/dist/chunk-ZKRKWXEQ.js +2369 -0
- package/dist/chunk-ZMK2HTZ5.js +275 -0
- package/dist/constants-CLB7B6MN.js +101 -0
- package/dist/constants-O343SMHL.js +103 -0
- package/dist/constants-YPGDEX5X.js +103 -0
- package/dist/index.browser.cjs +11 -5
- package/dist/index.browser.js +12 -12
- package/dist/index.cjs +2358 -1896
- package/dist/index.d.cts +934 -776
- package/dist/index.d.ts +934 -776
- package/dist/index.js +1353 -1271
- package/dist/storage/SqliteNodeEngine.cjs +12 -6
- package/dist/storage/SqliteNodeEngine.js +4 -4
- package/dist/storage/SqliteWasmEngine.cjs +11 -5
- package/dist/storage/SqliteWasmEngine.js +4 -4
- package/package.json +5 -3
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
// src/storage/EngineRegistry.ts
// Compiled TS string-enum of the storage backends the registry knows about.
var EngineType = /* @__PURE__ */ ((values) => {
  values["INDEXED_DB"] = "indexeddb";
  values["SQLITE_NODE"] = "sqlite-node";
  values["SQLITE_WASM"] = "sqlite-wasm";
  return values;
})(EngineType || {});
|
|
8
|
+
// Descriptor table for every engine type: which environment it runs in,
// which npm package backs it (null = built into the platform), and a
// one-line description for display purposes.
var ENGINE_INFO = {
  indexeddb: {
    type: "indexeddb",
    displayName: "IndexedDB",
    environment: "browser",
    package: null,
    description: "Browser-native IndexedDB key-value store"
  },
  "sqlite-node": {
    type: "sqlite-node",
    displayName: "SQLite (Node.js)",
    environment: "node",
    package: "better-sqlite3",
    description: "SQLite via native better-sqlite3 bindings"
  },
  "sqlite-wasm": {
    type: "sqlite-wasm",
    displayName: "SQLite (WASM)",
    environment: "browser",
    package: "sql.js",
    description: "SQLite via sql.js WebAssembly"
  }
};
|
|
31
|
+
/**
 * Build and initialize a storage engine for the given EngineType value.
 * Engine modules are loaded lazily via dynamic import so unused backends
 * are never fetched.
 * @param {string} type - one of the EngineType values.
 * @param {object} options - backend-specific settings: dbPath (sqlite-node),
 *   wasmUrl / existingData (sqlite-wasm).
 * @returns {Promise<object>} a ready-to-use engine instance.
 * @throws {Error} when `type` is not a known engine type.
 */
async function createEngine(type, options = {}) {
  if (type === "indexeddb" /* INDEXED_DB */) {
    const { IndexedDBEngine } = await import("./IndexedDBEngine-FXAD42F3.js");
    const engine = new IndexedDBEngine();
    await engine.init();
    return engine;
  }
  if (type === "sqlite-node" /* SQLITE_NODE */) {
    const { SqliteNodeEngine } = await import("./storage/SqliteNodeEngine.js");
    return SqliteNodeEngine.create(options.dbPath || ":memory:");
  }
  if (type === "sqlite-wasm" /* SQLITE_WASM */) {
    const { SqliteWasmEngine } = await import("./storage/SqliteWasmEngine.js");
    const engine = new SqliteWasmEngine();
    await engine.init(options.wasmUrl, options.existingData);
    return engine;
  }
  const validTypes = Object.values(EngineType).join(", ");
  throw new Error(
    `Unknown engine type: "${type}". Valid types: ${validTypes}`
  );
}
|
|
57
|
+
/**
 * List every engine type identifier this build knows about.
 * @returns {string[]} the EngineType values, in declaration order.
 */
function getAvailableEngines() {
  const engineIds = Object.values(EngineType);
  return engineIds;
}
|
|
60
|
+
/**
 * Look up the static descriptor for one engine type.
 * @param {string} type - an EngineType value.
 * @returns {object|undefined} the ENGINE_INFO entry; unregistered types
 *   fall through to undefined.
 */
function getEngineInfo(type) {
  const info = ENGINE_INFO[type];
  return info;
}
|
|
63
|
+
/**
 * Collect descriptors for every engine whose `environment` field matches.
 * ENGINE_INFO currently uses "browser" and "node" as environment values.
 * @param {string} env - environment to match.
 * @returns {object[]} matching descriptors, in table order.
 */
function getEnginesByEnvironment(env) {
  const matches = [];
  for (const info of Object.values(ENGINE_INFO)) {
    if (info.environment === env) {
      matches.push(info);
    }
  }
  return matches;
}
|
|
66
|
+
|
|
67
|
+
export {
|
|
68
|
+
EngineType,
|
|
69
|
+
ENGINE_INFO,
|
|
70
|
+
createEngine,
|
|
71
|
+
getAvailableEngines,
|
|
72
|
+
getEngineInfo,
|
|
73
|
+
getEnginesByEnvironment
|
|
74
|
+
};
|
|
@@ -0,0 +1,368 @@
|
|
|
1
|
+
import {
|
|
2
|
+
Either
|
|
3
|
+
} from "./chunk-2KADE3SE.js";
|
|
4
|
+
import {
|
|
5
|
+
LLM_DEFAULT_RETRY_COUNT,
|
|
6
|
+
LLM_DEFAULT_RETRY_DELAY_SECS,
|
|
7
|
+
LLM_DEFAULT_TIMEOUT_SECS
|
|
8
|
+
} from "./chunk-ETJWXHKZ.js";
|
|
9
|
+
|
|
10
|
+
// src/ptr/llm/providers/LLMProvider.ts
// Shared base for provider adapters. Subclasses supply provider_name,
// validate_connection() and list_models().
var BaseLLMProvider = class {
  /**
   * Probe the provider and summarize its state.
   * @returns {Promise<{provider: string, available: boolean,
   *   models: string[], error: string|null}>} `models` is always an
   *   array; failures are reported through `error`.
   */
  async get_status() {
    const available = await this.validate_connection();
    if (!available) {
      return {
        provider: this.provider_name,
        available: false,
        models: [],
        error: "Not connected"
      };
    }
    // list_models() yields an Either-like value: right = model list,
    // left = error message string.
    const result = await this.list_models();
    const outcome = result.isRight ? result.right : result.left;
    return {
      provider: this.provider_name,
      available: true,
      models: Array.isArray(outcome) ? outcome : [],
      error: typeof outcome === "string" ? outcome : null
    };
  }
};
|
|
33
|
+
|
|
34
|
+
// src/ptr/llm/Config.ts
// Provider chosen when a config does not name one explicitly.
var DEFAULT_PROVIDER = "ollama";
// Registry of known LLM backends. Each entry records the server origin,
// the REST paths for completion / chat / model listing (models_path is
// null where no listing endpoint is configured), the model used when
// none is specified, and a non-exhaustive list of known model names.
var LLM_PROVIDERS = {
  ollama: {
    base_url: "http://localhost:11434",
    api_path: "/api/generate",
    chat_path: "/api/chat",
    models_path: "/api/tags",
    default_model: "qwen3:0.6b",
    available_models: ["qwen3:0.6b", "llama3:latest", "qwen3:latest"]
  },
  webllm: {
    base_url: "",
    // runs entirely in-browser, so there is no HTTP endpoint
    api_path: "",
    chat_path: "",
    models_path: null,
    default_model: "Llama-3-8B-Instruct-q4f32_1-MLC",
    available_models: ["Llama-3-8B-Instruct-q4f32_1-MLC", "Hermes-2-Pro-Llama-3-8B-q4f16_1-MLC", "Phi-3-Mini-4k-Instruct-q4f16_1-MLC"]
  },
  "mlc-llm": {
    base_url: "http://localhost:8000",
    api_path: "/v1/completions",
    chat_path: "/v1/chat/completions",
    models_path: "/v1/models",
    default_model: "Llama-3-8B-Instruct-q4f16_1-MLC",
    available_models: []
  },
  lmstudio: {
    base_url: "http://localhost:1234",
    api_path: "/v1/completions",
    chat_path: "/v1/chat/completions",
    models_path: "/v1/models",
    default_model: "local-model",
    available_models: []
  },
  openai: {
    base_url: "https://api.openai.com",
    api_path: "/v1/completions",
    chat_path: "/v1/chat/completions",
    models_path: "/v1/models",
    default_model: "gpt-4",
    available_models: ["gpt-4", "gpt-4-turbo", "gpt-3.5-turbo"]
  },
  anthropic: {
    base_url: "https://api.anthropic.com",
    api_path: "/v1/messages",
    chat_path: "/v1/messages",
    models_path: null,
    default_model: "claude-3-sonnet-20240229",
    available_models: ["claude-3-opus", "claude-3-sonnet", "claude-3-haiku"]
  }
};
|
|
87
|
+
// Baseline generation settings; the LLMConfig constructor falls back to
// these for any field the caller leaves undefined. Timeout/retry defaults
// come from the shared constants chunk (names suggest seconds — the
// Ollama provider multiplies timeout by 1000 before use).
var DEFAULT_LLM_CONFIG = {
  temperature: 0.7,
  max_tokens: 2048,
  top_p: 1,
  top_k: 40,
  timeout: LLM_DEFAULT_TIMEOUT_SECS,
  stream: false,
  response_format: "text",
  retry_count: LLM_DEFAULT_RETRY_COUNT,
  retry_delay: LLM_DEFAULT_RETRY_DELAY_SECS
};
// The only response_format values LLMConfig.validate() accepts.
var RESPONSE_FORMATS = ["text", "json", "structured", "markdown"];
|
|
99
|
+
// Validated, provider-aware LLM configuration. Construction normalizes
// every field (applying DEFAULT_LLM_CONFIG fallbacks) and immediately
// runs validate(), so an instance is always internally consistent.
var LLMConfig = class _LLMConfig {
  provider;
  model;
  endpoint_url;
  api_key;
  system_prompt;
  assistant_instruction;
  temperature;
  max_tokens;
  top_p;
  top_k;
  frequency_penalty;
  presence_penalty;
  stop_sequences;
  response_format;
  json_schema;
  timeout;
  retry_count;
  retry_delay;
  stream;
  /**
   * @param {object} data - partial config; missing fields fall back to
   *   DEFAULT_LLM_CONFIG / DEFAULT_PROVIDER.
   * @throws {Error} via validate() for an unknown provider or format.
   */
  constructor(data = {}) {
    // Note the deliberate split between `||` and `??` below:
    // string/object-ish fields use `||` (so "" and other falsy values fall
    // back), while numeric/boolean tuning fields use `??` (so explicit 0
    // or false are kept).
    this.provider = data.provider || DEFAULT_PROVIDER;
    this.model = data.model || null;
    this.endpoint_url = data.endpoint_url || null;
    this.api_key = data.api_key || null;
    this.system_prompt = data.system_prompt || "";
    this.assistant_instruction = data.assistant_instruction || "";
    this.temperature = data.temperature ?? DEFAULT_LLM_CONFIG.temperature;
    this.max_tokens = data.max_tokens ?? DEFAULT_LLM_CONFIG.max_tokens;
    this.top_p = data.top_p ?? DEFAULT_LLM_CONFIG.top_p;
    this.top_k = data.top_k ?? DEFAULT_LLM_CONFIG.top_k;
    this.frequency_penalty = data.frequency_penalty || 0;
    this.presence_penalty = data.presence_penalty || 0;
    this.stop_sequences = data.stop_sequences || [];
    this.response_format = data.response_format || DEFAULT_LLM_CONFIG.response_format;
    this.json_schema = data.json_schema || null;
    this.timeout = data.timeout ?? DEFAULT_LLM_CONFIG.timeout;
    this.retry_count = data.retry_count ?? DEFAULT_LLM_CONFIG.retry_count;
    this.retry_delay = data.retry_delay ?? DEFAULT_LLM_CONFIG.retry_delay;
    this.stream = data.stream ?? DEFAULT_LLM_CONFIG.stream;
    this.validate();
  }
  /**
   * Reject configs whose provider is not in LLM_PROVIDERS or whose
   * response_format is not in RESPONSE_FORMATS.
   * @throws {Error} with the list of valid values in the message.
   */
  validate() {
    if (!LLM_PROVIDERS[this.provider]) {
      throw new Error(`Unknown provider: ${this.provider}. Available: ${Object.keys(LLM_PROVIDERS).join(", ")}`);
    }
    if (!RESPONSE_FORMATS.includes(this.response_format)) {
      throw new Error(`Unknown response format: ${this.response_format}. Available: ${RESPONSE_FORMATS.join(", ")}`);
    }
  }
  // Explicit model, else the provider's default_model.
  get effective_model() {
    return this.model || LLM_PROVIDERS[this.provider].default_model;
  }
  // Explicit endpoint_url (trailing slash stripped), else the provider's
  // configured base_url.
  get effective_base_url() {
    if (this.endpoint_url) {
      return this.endpoint_url.replace(/\/$/, "");
    }
    return LLM_PROVIDERS[this.provider].base_url;
  }
  /**
   * Translate this config into the request-parameter shape the active
   * provider expects: Ollama nests sampling knobs under `options`
   * (with max_tokens renamed num_predict), all other providers use flat
   * OpenAI-style keys.
   * @returns {object} request parameters for the provider API.
   */
  to_provider_params() {
    const params = {
      model: this.effective_model,
      temperature: this.temperature
    };
    if (this.provider === "ollama") {
      params.options = {
        num_predict: this.max_tokens,
        top_p: this.top_p,
        top_k: this.top_k,
        temperature: this.temperature
      };
      if (this.stop_sequences.length > 0) {
        params.options.stop = this.stop_sequences;
      }
    } else {
      params.max_tokens = this.max_tokens;
      params.top_p = this.top_p;
      if (this.stop_sequences.length > 0) {
        params.stop = this.stop_sequences;
      }
      // Penalties are only sent when non-zero (0 is also the default).
      if (this.frequency_penalty) params.frequency_penalty = this.frequency_penalty;
      if (this.presence_penalty) params.presence_penalty = this.presence_penalty;
    }
    return params;
  }
  /**
   * Build a config by layering three sources, lowest precedence first:
   * concrete.llm_config, then a whitelist of top-level keys on `concrete`
   * itself, then `context` overrides.
   * @param {object} concrete - object that may carry llm_config and/or
   *   top-level provider/model/prompt fields.
   * @param {object} context - per-call overrides (highest precedence).
   * @returns {_LLMConfig} a validated config instance.
   */
  static from_concrete(concrete, context = {}) {
    const configData = { ...concrete.llm_config || {} };
    ["provider", "model", "system_prompt", "temperature", "max_tokens"].forEach((key) => {
      if (key in concrete) {
        configData[key] = concrete[key];
      }
    });
    const contextKeys = [
      "provider",
      "model",
      "endpoint_url",
      "api_key",
      "system_prompt",
      "assistant_instruction",
      "temperature",
      "max_tokens",
      "top_p",
      "top_k",
      "response_format",
      "timeout"
    ];
    contextKeys.forEach((key) => {
      if (key in context) {
        configData[key] = context[key];
      }
    });
    return new _LLMConfig(configData);
  }
};
|
|
213
|
+
|
|
214
|
+
// src/ptr/llm/providers/OllamaProvider.ts
import * as http from "http";
import * as https from "https";
// Provider adapter for an Ollama server. Every network or parse failure
// is surfaced as Either.left(message); callers never need try/catch.
var OllamaProvider = class extends BaseLLMProvider {
  provider_name = "ollama";
  // Endpoint origin with any trailing slash removed.
  base_url;
  // Request timeout in milliseconds (constructor argument is in seconds).
  timeout;
  // Static descriptor (paths, default model) taken from LLM_PROVIDERS.
  config;
  /**
   * @param {string|null} base_url - override for the server URL; falls
   *   back to the configured base_url (http://localhost:11434).
   * @param {number} timeout - per-request timeout in seconds.
   */
  constructor(base_url = null, timeout = LLM_DEFAULT_TIMEOUT_SECS) {
    super();
    this.config = LLM_PROVIDERS["ollama"];
    // Strip one trailing slash so endpoint paths can be appended safely.
    this.base_url = (base_url || this.config.base_url).replace(/\/$/, "");
    this.timeout = timeout * 1e3;
  }
  /**
   * Issue an HTTP(S) request against the Ollama server.
   * @param {string} endpoint - path appended to base_url (e.g. "/api/tags").
   * @param {object|null} data - JSON-serializable body, or null for none.
   * @param {string} method - HTTP method, default "POST".
   * @returns {Promise<object>} Either.right(parsed JSON) or
   *   Either.left(error string); the promise never rejects.
   */
  async _make_request(endpoint, data = null, method = "POST") {
    const urlStr = `${this.base_url}${endpoint}`;
    const url = new URL(urlStr);
    const isHttps = url.protocol === "https:";
    const client = isHttps ? https : http;
    const options = {
      method,
      headers: {
        "Content-Type": "application/json"
      },
      timeout: this.timeout
    };
    return new Promise((resolve) => {
      let payload;
      if (data) {
        payload = JSON.stringify(data);
        options.headers["Content-Length"] = Buffer.byteLength(payload);
      }
      const req = client.request(url, options, (res) => {
        let body = "";
        res.on("data", (chunk) => {
          body += chunk;
        });
        res.on("end", () => {
          if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300) {
            try {
              // Normal case: the body is a single JSON document. (The
              // previous version branched on body.includes("\n") here but
              // both arms were identical — dead conditional removed.)
              resolve(Either.right(JSON.parse(body)));
            } catch (e) {
              // Fallback for newline-delimited JSON bodies: the last
              // non-empty line carries the final message.
              const lines = body.trim().split("\n").filter((l) => l);
              if (lines.length > 0) {
                try {
                  resolve(Either.right(JSON.parse(lines[lines.length - 1])));
                } catch (parseErr) {
                  resolve(Either.left(`Ollama response parse error: ${parseErr}`));
                }
              } else {
                resolve(Either.left(`Ollama response parse error: ${e}`));
              }
            }
          } else {
            resolve(Either.left(`Ollama HTTP error ${res.statusCode}: ${body}`));
          }
        });
      });
      req.on("error", (e) => {
        resolve(Either.left(`Ollama connection error: ${e.message}`));
      });
      req.on("timeout", () => {
        req.destroy();
        resolve(Either.left(`Ollama request timed out after ${this.timeout}ms`));
      });
      if (payload) {
        req.write(payload);
      }
      req.end();
    });
  }
  /**
   * Text completion via the configured api_path (/api/generate).
   * @param {string} prompt - prompt text.
   * @param {object} params - may carry `model` (string) and `options`.
   * @param {string[]} [images] - optional image payloads to attach.
   * @returns {Promise<object>} Either.right(response text) or Either.left(error).
   */
  async complete(prompt, params, images) {
    const data = {
      model: typeof params.model === "string" ? params.model : this.config.default_model,
      prompt,
      stream: false
    };
    if (images && images.length > 0) {
      data.images = images;
    }
    if (params.options) {
      data.options = params.options;
    }
    const result = await this._make_request(this.config.api_path, data);
    if (result.isLeft) {
      return Either.left(result.left);
    }
    const response = result.right;
    if (response.response !== void 0) {
      return Either.right(response.response);
    } else if (response.error) {
      return Either.left(`Ollama error: ${response.error}`);
    } else {
      return Either.left(`Unexpected Ollama response format: ${JSON.stringify(response)}`);
    }
  }
  /**
   * Chat completion via the configured chat_path (/api/chat).
   * @param {object[]} messages - chat messages in Ollama's format.
   * @param {object} params - may carry `model` (string) and `options`.
   * @returns {Promise<object>} Either.right({content, role, model, done,
   *   total_duration, eval_count}) or Either.left(error string).
   */
  async chat(messages, params) {
    const data = {
      model: typeof params.model === "string" ? params.model : this.config.default_model,
      messages,
      stream: false
    };
    if (params.options) {
      data.options = params.options;
    }
    const result = await this._make_request(this.config.chat_path, data);
    if (result.isLeft) {
      return Either.left(result.left);
    }
    const response = result.right;
    if (response.message) {
      return Either.right({
        content: response.message.content || "",
        role: response.message.role || "assistant",
        model: response.model || data.model,
        done: response.done ?? true,
        total_duration: response.total_duration,
        eval_count: response.eval_count
      });
    } else if (response.error) {
      return Either.left(`Ollama error: ${response.error}`);
    } else {
      return Either.left(`Unexpected Ollama chat response format: ${JSON.stringify(response)}`);
    }
  }
  /** True when a GET to models_path (/api/tags) succeeds. */
  async validate_connection() {
    const result = await this._make_request(this.config.models_path, null, "GET");
    return result.isRight;
  }
  /**
   * List installed model names via GET models_path (/api/tags).
   * @returns {Promise<object>} Either.right(string[]) or Either.left(error).
   */
  async list_models() {
    const result = await this._make_request(this.config.models_path, null, "GET");
    if (result.isLeft) {
      return Either.left(result.left);
    }
    const response = result.right;
    if (response.models) {
      const models = response.models.map((m) => m.name || m.model || "unknown");
      return Either.right(models);
    } else {
      return Either.left(`Unexpected models response: ${JSON.stringify(response)}`);
    }
  }
};
|
|
361
|
+
|
|
362
|
+
export {
|
|
363
|
+
DEFAULT_PROVIDER,
|
|
364
|
+
LLM_PROVIDERS,
|
|
365
|
+
LLMConfig,
|
|
366
|
+
BaseLLMProvider,
|
|
367
|
+
OllamaProvider
|
|
368
|
+
};
|