noosphere 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +257 -0
- package/dist/index.js +1290 -0
- package/dist/index.js.map +1 -0
- package/package.json +38 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1290 @@
|
|
|
1
|
+
// src/errors.ts
// Error codes that represent transient conditions callers may safely retry.
var RETRYABLE_CODES = /* @__PURE__ */ new Set(
  ["PROVIDER_UNAVAILABLE", "RATE_LIMITED", "TIMEOUT", "GENERATION_FAILED"]
);
/**
 * Domain error carrying provider/modality context next to a machine-readable
 * error code, so callers can branch on `code` instead of parsing messages.
 */
var NoosphereError = class extends Error {
  code;
  provider;
  modality;
  model;
  cause;
  constructor(message, options) {
    super(message);
    this.name = "NoosphereError";
    const { code, provider, modality, model, cause } = options;
    this.code = code;
    this.provider = provider;
    this.modality = modality;
    this.model = model;
    this.cause = cause;
  }
  /** True when this error's code marks a transient, retry-worthy failure. */
  isRetryable() {
    return RETRYABLE_CODES.has(this.code);
  }
};
|
|
27
|
+
|
|
28
|
+
// src/config.ts
// Cloud provider id -> environment variable holding its API key.
var ENV_KEY_MAP = {
  openai: "OPENAI_API_KEY",
  anthropic: "ANTHROPIC_API_KEY",
  google: "GEMINI_API_KEY",
  fal: "FAL_KEY",
  openrouter: "OPENROUTER_API_KEY",
  huggingface: "HUGGINGFACE_TOKEN",
  groq: "GROQ_API_KEY",
  mistral: "MISTRAL_API_KEY",
  xai: "XAI_API_KEY"
};
// Built-in host/port defaults (and env-var override names) per local service.
var LOCAL_DEFAULTS = {
  ollama: { host: "http://localhost", port: 11434, envHost: "OLLAMA_HOST", envPort: "OLLAMA_PORT" },
  comfyui: { host: "http://localhost", port: 8188, envHost: "COMFYUI_HOST", envPort: "COMFYUI_PORT" },
  piper: { host: "http://localhost", port: 5500, envHost: "PIPER_HOST", envPort: "PIPER_PORT" },
  kokoro: { host: "http://localhost", port: 5501, envHost: "KOKORO_HOST", envPort: "KOKORO_PORT" }
};
// Default retryable codes; note GENERATION_FAILED is deliberately excluded here.
var DEFAULT_RETRYABLE = [
  "PROVIDER_UNAVAILABLE",
  "RATE_LIMITED",
  "TIMEOUT"
];
// Parses an integer out of an env-var string; returns `fallback` when the
// value is unset, empty, or not numeric. BUGFIX: the previous inline
// parseInt calls propagated NaN for malformed values like "abc".
function parseIntOr(value, fallback) {
  if (!value) return fallback;
  const parsed = Number.parseInt(value, 10);
  return Number.isFinite(parsed) ? parsed : fallback;
}
/**
 * Resolves the full runtime configuration from user input plus environment.
 * Precedence per field: explicit input > environment variable > built-in default.
 * @param input partial user config ({} is valid)
 * @returns fully-populated config object (keys, local endpoints, retry/timeout policy)
 */
function resolveConfig(input) {
  const keys = {};
  for (const [name, envVar] of Object.entries(ENV_KEY_MAP)) {
    keys[name] = input.keys?.[name] ?? process.env[envVar];
  }
  const local = {};
  for (const [name, defaults] of Object.entries(LOCAL_DEFAULTS)) {
    const cfgLocal = input.local?.[name];
    local[name] = {
      enabled: cfgLocal?.enabled ?? true,
      host: cfgLocal?.host ?? process.env[defaults.envHost] ?? defaults.host,
      port: cfgLocal?.port ?? parseIntOr(process.env[defaults.envPort], defaults.port),
      type: cfgLocal?.type
    };
  }
  const autoDetectEnv = process.env.NOOSPHERE_AUTO_DETECT_LOCAL;
  return {
    keys,
    local,
    customLocal: input.local?.custom ?? [],
    defaults: input.defaults ?? {},
    // Env var semantics: any value other than the literal "false" enables detection.
    autoDetectLocal: input.autoDetectLocal ?? (autoDetectEnv !== void 0 ? autoDetectEnv !== "false" : true),
    // TTL is expressed in minutes; 60 is the built-in default.
    discoveryCacheTTL: input.discoveryCacheTTL ?? parseIntOr(process.env.NOOSPHERE_DISCOVERY_CACHE_TTL, 60),
    retry: {
      maxRetries: input.retry?.maxRetries ?? 2,
      backoffMs: input.retry?.backoffMs ?? 1e3,
      retryableErrors: input.retry?.retryableErrors ?? DEFAULT_RETRYABLE,
      failover: input.retry?.failover ?? true
    },
    // Per-modality request timeouts in milliseconds.
    timeout: {
      llm: input.timeout?.llm ?? 3e4,
      image: input.timeout?.image ?? 12e4,
      video: input.timeout?.video ?? 3e5,
      tts: input.timeout?.tts ?? 6e4
    },
    onUsage: input.onUsage
  };
}
|
|
92
|
+
|
|
93
|
+
// src/registry.ts
/**
 * Tracks registered providers plus a per-provider cache of the models they
 * expose. Cached model lists expire after a configurable TTL.
 */
var Registry = class {
  providers = /* @__PURE__ */ new Map();
  modelCache = /* @__PURE__ */ new Map();
  // providerId -> cached models
  cacheTTLMs;
  /** @param cacheTTLMinutes how long a synced model list stays fresh */
  constructor(cacheTTLMinutes) {
    this.cacheTTLMs = cacheTTLMinutes * 60 * 1e3;
  }
  /** Registers (or replaces) a provider, keyed by its id. */
  addProvider(provider) {
    this.providers.set(provider.id, provider);
  }
  getProvider(id) {
    return this.providers.get(id);
  }
  getAllProviders() {
    return [...this.providers.values()];
  }
  /**
   * Picks a provider for a modality. When a preferred id is given it is
   * honored only if that provider supports the modality (else null).
   * Otherwise local providers win; the first matching cloud provider is
   * the fallback when no local one exists.
   */
  resolveProvider(modality, preferredId) {
    if (preferredId) {
      const preferred = this.providers.get(preferredId);
      return preferred && preferred.modalities.includes(modality) ? preferred : null;
    }
    let firstCloud = null;
    for (const candidate of this.providers.values()) {
      if (!candidate.modalities.includes(modality)) continue;
      if (candidate.isLocal) return candidate;
      firstCloud ??= candidate;
    }
    return firstCloud;
  }
  /** Finds a cached model by id + modality, paired with its owning provider. */
  resolveModel(modelId, modality) {
    for (const [providerId, cached] of this.modelCache) {
      const match = cached.models.find(
        (m) => m.id === modelId && m.modality === modality
      );
      if (!match) continue;
      const provider = this.providers.get(providerId);
      if (provider) return { provider, model: match };
    }
    return null;
  }
  /** Returns every cached model, optionally restricted to one modality. */
  getModels(modality) {
    const collected = [];
    for (const cached of this.modelCache.values()) {
      const subset = modality ? cached.models.filter((m) => m.modality === modality) : cached.models;
      collected.push(...subset);
    }
    return collected;
  }
  /** Looks up one cached model by provider id and model id (null if unknown). */
  getModel(provider, modelId) {
    const cached = this.modelCache.get(provider);
    return cached?.models.find((m) => m.id === modelId) ?? null;
  }
  /** Refreshes one provider's model cache; returns the model count (0 for unknown ids). */
  async syncProvider(providerId) {
    const provider = this.providers.get(providerId);
    if (!provider) return 0;
    const models = await provider.listModels();
    this.modelCache.set(providerId, { models, syncedAt: Date.now() });
    return models.length;
  }
  /**
   * Syncs every registered provider sequentially, collecting per-provider
   * counts; failures are recorded as error strings rather than thrown.
   */
  async syncAll() {
    const byProvider = {};
    const errors = [];
    let synced = 0;
    for (const provider of this.providers.values()) {
      try {
        const count = await this.syncProvider(provider.id);
        byProvider[provider.id] = count;
        synced += count;
      } catch (err) {
        errors.push(`${provider.id}: ${err instanceof Error ? err.message : String(err)}`);
        byProvider[provider.id] = 0;
      }
    }
    return { synced, byProvider, errors };
  }
  /** True when a provider was never synced or its cache entry has aged out. */
  isCacheStale(providerId) {
    const cached = this.modelCache.get(providerId);
    return !cached || Date.now() - cached.syncedAt > this.cacheTTLMs;
  }
  clearCache() {
    this.modelCache.clear();
  }
  /** Summarizes registered providers (optionally filtered by modality). */
  getProviderInfos(modality) {
    const infos = [];
    for (const provider of this.providers.values()) {
      if (modality && !provider.modalities.includes(modality)) continue;
      infos.push({
        id: provider.id,
        name: provider.name,
        modalities: provider.modalities,
        local: provider.isLocal,
        status: "online",
        // ping-based status is set externally
        modelCount: this.modelCache.get(provider.id)?.models.length ?? 0
      });
    }
    return infos;
  }
};
|
|
202
|
+
|
|
203
|
+
// src/tracking.ts
/**
 * In-memory usage/cost log. Each recorded event is optionally forwarded to
 * an async callback; summaries aggregate cost per provider and per modality.
 */
var UsageTracker = class {
  events = [];
  onUsage;
  /** @param onUsage optional async callback invoked (and awaited) per event */
  constructor(onUsage) {
    this.onUsage = onUsage;
  }
  /** Appends an event and awaits the user callback, when one was supplied. */
  async record(event) {
    this.events.push(event);
    await this.onUsage?.(event);
  }
  /**
   * Aggregates cost over recorded events, optionally filtered by time window
   * (ISO strings compared as epoch ms), provider, or modality.
   */
  getSummary(options) {
    const sinceMs = options?.since ? new Date(options.since).getTime() : void 0;
    const untilMs = options?.until ? new Date(options.until).getTime() : void 0;
    const selected = this.events.filter((e) => {
      const ts = new Date(e.timestamp).getTime();
      // Negated-comparison form keeps the original NaN semantics: an
      // unparseable bound (or timestamp) excludes events rather than keeping them.
      if (sinceMs !== void 0 && !(ts >= sinceMs)) return false;
      if (untilMs !== void 0 && !(ts <= untilMs)) return false;
      if (options?.provider && e.provider !== options.provider) return false;
      if (options?.modality && e.modality !== options.modality) return false;
      return true;
    });
    const byProvider = {};
    const byModality = { llm: 0, image: 0, video: 0, tts: 0 };
    let totalCost = 0;
    for (const e of selected) {
      totalCost += e.cost;
      byProvider[e.provider] = (byProvider[e.provider] ?? 0) + e.cost;
      byModality[e.modality] += e.cost;
    }
    return {
      totalCost,
      totalRequests: selected.length,
      byProvider,
      byModality
    };
  }
  /** Discards all recorded events. */
  clear() {
    this.events = [];
  }
};
|
|
251
|
+
|
|
252
|
+
// src/providers/pi-ai.ts
import { getModels, getProviders, complete, stream, setApiKey } from "@mariozechner/pi-ai";
// Upstream vendors the pi-ai gateway can route to, in preference order for findModel().
var KNOWN_PROVIDERS = ["anthropic", "google", "openai", "xai", "groq", "cerebras", "openrouter", "zai"];
// Vendors that run on the user's machine (used to tag models as local).
var LOCAL_PROVIDERS = /* @__PURE__ */ new Set(["ollama"]);
// Concatenates the "text" parts of a pi-ai assistant message.
function extractText(msg) {
  return msg.content.filter((c) => c.type === "text").map((c) => c.text).join("");
}
// Concatenates the "thinking" parts; undefined when the message has none.
function extractThinking(msg) {
  const thinking = msg.content.filter((c) => c.type === "thinking").map((c) => c.thinking).join("");
  return thinking || void 0;
}
// Converts the public message list into pi-ai's context shape: the first
// system message becomes systemPrompt, remaining turns are timestamped.
// Shared by chat() and stream(), which previously duplicated this inline.
function toPiContext(messages) {
  return {
    systemPrompt: messages.find((m) => m.role === "system")?.content,
    messages: messages.filter((m) => m.role !== "system").map((m) => ({
      role: m.role,
      content: m.content,
      timestamp: Date.now()
    }))
  };
}
// Maps a final pi-ai assistant message into the public chat-result shape.
// `start` is the request start time used to derive latencyMs.
function toChatResult(msg, requestedModel, start) {
  return {
    content: extractText(msg),
    thinking: extractThinking(msg),
    provider: "pi-ai",
    model: msg.model ?? requestedModel ?? "unknown",
    modality: "llm",
    latencyMs: Date.now() - start,
    usage: {
      cost: msg.usage?.cost?.total ?? 0,
      input: msg.usage?.input ?? 0,
      output: msg.usage?.output ?? 0,
      unit: "tokens"
    }
  };
}
/**
 * LLM provider that fronts the pi-ai gateway, which fans a single interface
 * out to many upstream vendors. Only the "llm" modality is supported.
 */
var PiAiProvider = class {
  id = "pi-ai";
  name = "pi-ai (LLM Gateway)";
  modalities = ["llm"];
  isLocal = false;
  keys;
  /**
   * @param keys map of vendor name -> API key; falsy values are skipped.
   * Every non-empty key is registered with pi-ai via setApiKey.
   * BUGFIX: the original branched on KNOWN_PROVIDERS membership but executed
   * the identical setApiKey call on both branches — collapsed to one call.
   */
  constructor(keys) {
    this.keys = {};
    for (const [vendor, key] of Object.entries(keys)) {
      if (!key) continue;
      this.keys[vendor] = key;
      setApiKey(vendor, key);
    }
  }
  // Cheap health check: the gateway is "up" if its provider table loads.
  async ping() {
    try {
      getProviders();
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Lists every model the gateway knows across all vendors. Vendors that
   * fail to enumerate are skipped silently (best-effort discovery).
   */
  async listModels(modality) {
    if (modality && modality !== "llm") return [];
    const models = [];
    for (const provider of KNOWN_PROVIDERS) {
      try {
        for (const m of getModels(provider)) {
          models.push({
            id: m.id,
            provider: "pi-ai",
            name: m.name || m.id,
            modality: "llm",
            local: LOCAL_PROVIDERS.has(String(m.provider)),
            cost: {
              price: m.cost.input ?? 0,
              unit: m.cost.input > 0 ? "per_1m_tokens" : "free"
            },
            capabilities: {
              contextWindow: m.contextWindow,
              maxTokens: m.maxTokens,
              supportsVision: m.input.includes("image"),
              supportsStreaming: true
            }
          });
        }
      } catch {
        // vendor not configured or enumeration failed — skip it
      }
    }
    return models;
  }
  /**
   * Single-shot completion.
   * @throws Error when the requested model cannot be resolved.
   */
  async chat(options) {
    const start = Date.now();
    const { model, provider } = this.findModel(options.model);
    if (!model || !provider) {
      throw new Error(`Model not found: ${options.model ?? "default"}`);
    }
    const response = await complete(model, toPiContext(options.messages));
    return toChatResult(response, options.model, start);
  }
  /**
   * Streaming completion. Returns an object exposing an async iterator of
   * delta events ("text_delta"/"thinking_delta" followed by "done" or
   * "error"), a result() promise with the final chat result, and abort().
   * Note: result() only settles once the iterator runs to completion.
   * @throws Error (synchronously) when the requested model cannot be resolved.
   */
  stream(options) {
    const start = Date.now();
    const { model, provider } = this.findModel(options.model);
    if (!model || !provider) {
      throw new Error(`Model not found: ${options.model ?? "default"}`);
    }
    const piStream = stream(model, toPiContext(options.messages));
    let aborted = false;
    let resolveResult = null;
    let rejectResult = null;
    const resultPromise = new Promise((resolve, reject) => {
      resolveResult = resolve;
      rejectResult = reject;
    });
    const asyncIterator = {
      async *[Symbol.asyncIterator]() {
        try {
          for await (const chunk of piStream) {
            if (aborted) break;
            if (chunk.type === "text_delta") {
              yield { type: "text_delta", delta: chunk.delta };
            } else if (chunk.type === "thinking_delta") {
              yield { type: "thinking_delta", delta: chunk.delta };
            }
          }
          const final = await piStream.result();
          const result = toChatResult(final, options.model, start);
          resolveResult?.(result);
          yield { type: "done", result };
        } catch (err) {
          const error = err instanceof Error ? err : new Error(String(err));
          rejectResult?.(error);
          yield { type: "error", error };
        }
      }
    };
    return {
      [Symbol.asyncIterator]: () => asyncIterator[Symbol.asyncIterator](),
      result: () => resultPromise,
      abort: () => {
        aborted = true;
      }
    };
  }
  /**
   * Resolves a model id to its pi-ai model object, scanning vendors in
   * KNOWN_PROVIDERS order; with no id, the first available model wins.
   * Returns { model: null, provider: null } when nothing matches.
   */
  findModel(modelId) {
    for (const provider of KNOWN_PROVIDERS) {
      try {
        const models = getModels(provider);
        const found = modelId ? models.find((m) => m.id === modelId) : models[0];
        if (found) return { model: found, provider };
      } catch {
        // vendor unavailable — try the next one
      }
    }
    return { model: null, provider: null };
  }
};
|
|
432
|
+
|
|
433
|
+
// src/providers/fal.ts
import { fal } from "@fal-ai/client";
// Pricing catalog endpoint; doubles as the model-discovery source.
var FAL_PRICING_URL = "https://api.fal.ai/v1/models/pricing";
/**
 * Cloud media provider backed by fal.ai (image, video, and TTS generation).
 * Pricing fetched during listModels() is cached in `pricingCache` so that
 * generation calls can report an estimated cost per request.
 */
var FalProvider = class {
  id = "fal";
  name = "fal.ai";
  modalities = ["image", "video", "tts"];
  isLocal = false;
  // API key; also registered globally with the fal client in the constructor.
  apiKey;
  // modelId -> { price, unit }, rebuilt on every successful listModels() call.
  pricingCache = /* @__PURE__ */ new Map();
  constructor(apiKey) {
    this.apiKey = apiKey;
    fal.config({ credentials: apiKey });
  }
  // "Alive" here only means a key is configured — no network round-trip is made.
  async ping() {
    return !!this.apiKey;
  }
  /**
   * Lists models from the pricing endpoint, optionally filtered by modality.
   * Modality is inferred heuristically from the model id and pricing unit
   * (see inferModality). Any network/shape error yields [] (best-effort).
   * NOTE(review): assumes the endpoint returns a JSON array of
   * { modelId, price, unit } objects — confirm against the fal API.
   */
  async listModels(modality) {
    try {
      const res = await fetch(FAL_PRICING_URL, {
        headers: { Authorization: `Key ${this.apiKey}` }
      });
      if (!res.ok) return [];
      const data = await res.json();
      this.pricingCache.clear();
      const models = [];
      for (const entry of data) {
        const inferredModality = this.inferModality(entry.modelId, entry.unit);
        // Only entries passing the modality filter populate the pricing cache.
        if (modality && inferredModality !== modality) continue;
        this.pricingCache.set(entry.modelId, { price: entry.price, unit: entry.unit });
        models.push({
          id: entry.modelId,
          provider: "fal",
          name: entry.modelId.replace("fal-ai/", ""),
          modality: inferredModality,
          local: false,
          cost: { price: entry.price, unit: entry.unit }
        });
      }
      return models;
    } catch {
      return [];
    }
  }
  /**
   * Text-to-image via fal.subscribe (queued request, waits for completion).
   * Returns a hosted URL rather than raw bytes. Cost comes from the pricing
   * cache and is 0 when listModels() has not been called for this model.
   */
  async image(options) {
    const model = options.model ?? "fal-ai/flux/schnell";
    const start = Date.now();
    const response = await fal.subscribe(model, {
      input: {
        prompt: options.prompt,
        negative_prompt: options.negativePrompt,
        // image_size is only sent when BOTH dimensions are provided.
        image_size: options.width && options.height ? { width: options.width, height: options.height } : void 0,
        seed: options.seed,
        num_inference_steps: options.steps,
        guidance_scale: options.guidanceScale
      }
    });
    // NOTE(review): assumes response.data.images is an array of
    // { url, width, height } — first image wins; verify against fal docs.
    const image = response.data?.images?.[0];
    const pricing = this.pricingCache.get(model);
    return {
      url: image?.url,
      provider: "fal",
      model,
      modality: "image",
      latencyMs: Date.now() - start,
      usage: {
        cost: pricing?.price ?? 0,
        unit: pricing?.unit ?? "per_image"
      },
      media: {
        width: image?.width,
        height: image?.height,
        // presumably fal serves PNG for image endpoints — TODO confirm
        format: "png"
      }
    };
  }
  /**
   * Text- or image-to-video via fal.subscribe. Either response.data.video.url
   * or the flat response.data.video_url is accepted (shapes vary per model).
   */
  async video(options) {
    const model = options.model ?? "fal-ai/kling-video/v2/master/text-to-video";
    const start = Date.now();
    const response = await fal.subscribe(model, {
      input: {
        prompt: options.prompt,
        image_url: options.imageUrl,
        duration: options.duration,
        fps: options.fps
      }
    });
    const video = response.data?.video;
    const pricing = this.pricingCache.get(model);
    return {
      url: video?.url ?? response.data?.video_url,
      provider: "fal",
      model,
      modality: "video",
      latencyMs: Date.now() - start,
      usage: {
        cost: pricing?.price ?? 0,
        unit: pricing?.unit ?? "per_second"
      },
      media: {
        // Echoes the requested dimensions; the API response is not inspected for them.
        width: options.width,
        height: options.height,
        duration: options.duration,
        format: "mp4",
        fps: options.fps
      }
    };
  }
  /**
   * Text-to-speech. Uses fal.run (direct call) rather than fal.subscribe,
   * since TTS completes quickly. Accepts either audio_url or audio.url in
   * the response (shape varies per model).
   */
  async speak(options) {
    const model = options.model ?? "fal-ai/kokoro/american-english";
    const start = Date.now();
    const response = await fal.run(model, {
      input: {
        text: options.text,
        voice: options.voice,
        speed: options.speed
      }
    });
    const audioUrl = response.data?.audio_url ?? response.data?.audio?.url;
    const pricing = this.pricingCache.get(model);
    return {
      url: audioUrl,
      provider: "fal",
      model,
      modality: "tts",
      latencyMs: Date.now() - start,
      usage: {
        cost: pricing?.price ?? 0,
        // input is measured in characters of source text.
        input: options.text.length,
        unit: pricing?.unit ?? "per_1k_chars"
      },
      media: {
        format: options.format ?? "mp3"
      }
    };
  }
  // Heuristic modality classification from the pricing unit and model id
  // keywords; anything not matching TTS or video patterns is assumed image.
  inferModality(modelId, unit) {
    if (unit.includes("char") || modelId.includes("tts") || modelId.includes("kokoro") || modelId.includes("elevenlabs")) return "tts";
    if (unit.includes("second") || modelId.includes("video") || modelId.includes("kling") || modelId.includes("sora") || modelId.includes("veo")) return "video";
    return "image";
  }
};
|
|
575
|
+
|
|
576
|
+
// src/providers/comfyui.ts
// Minimal SDXL text-to-image graph in ComfyUI API format. Wiring:
// node 4 (checkpoint) feeds 6/7 (positive/negative CLIP encodes) and the
// sampler (3); node 5 supplies the empty latent canvas; 8 VAE-decodes the
// sampled latent and 9 saves the resulting image.
var DEFAULT_TXT2IMG_WORKFLOW = {
  "3": {
    class_type: "KSampler",
    inputs: {
      seed: 0,
      steps: 20,
      cfg: 7,
      sampler_name: "euler",
      scheduler: "normal",
      denoise: 1,
      model: ["4", 0],
      positive: ["6", 0],
      negative: ["7", 0],
      latent_image: ["5", 0]
    }
  },
  "4": { class_type: "CheckpointLoaderSimple", inputs: { ckpt_name: "sd_xl_base_1.0.safetensors" } },
  "5": { class_type: "EmptyLatentImage", inputs: { width: 1024, height: 1024, batch_size: 1 } },
  "6": { class_type: "CLIPTextEncode", inputs: { text: "", clip: ["4", 1] } },
  "7": { class_type: "CLIPTextEncode", inputs: { text: "", clip: ["4", 1] } },
  "8": { class_type: "VAEDecode", inputs: { samples: ["3", 0], vae: ["4", 2] } },
  "9": { class_type: "SaveImage", inputs: { filename_prefix: "noosphere", images: ["8", 0] } }
};
/**
 * Local image provider talking to a ComfyUI server over its HTTP API
 * (/prompt to queue, /history to poll, /view to download results).
 */
var ComfyUIProvider = class {
  id = "comfyui";
  name = "ComfyUI";
  modalities = ["image", "video"];
  isLocal = true;
  baseUrl;
  constructor(config) {
    this.baseUrl = `${config.host}:${config.port}`;
  }
  /** Liveness probe against /system_stats. */
  async ping() {
    try {
      return (await fetch(`${this.baseUrl}/system_stats`)).ok;
    } catch {
      return false;
    }
  }
  /**
   * Advertises the fixed workflow-backed pseudo-models. The /object_info
   * request only acts as a reachability check; its payload is not inspected.
   */
  async listModels(modality) {
    try {
      const probe = await fetch(`${this.baseUrl}/object_info`);
      if (!probe.ok) return [];
      const entries = [];
      if (!modality || modality === "image") {
        entries.push({
          id: "comfyui-txt2img",
          provider: "comfyui",
          name: "ComfyUI Text-to-Image",
          modality: "image",
          local: true,
          cost: { price: 0, unit: "free" },
          capabilities: { maxWidth: 2048, maxHeight: 2048, supportsNegativePrompt: true }
        });
      }
      if (!modality || modality === "video") {
        entries.push({
          id: "comfyui-txt2vid",
          provider: "comfyui",
          name: "ComfyUI Text-to-Video",
          modality: "video",
          local: true,
          cost: { price: 0, unit: "free" },
          capabilities: { maxDuration: 10, supportsImageToVideo: true }
        });
      }
      return entries;
    } catch {
      return [];
    }
  }
  /**
   * Runs the default txt2img workflow: clones the graph, injects prompt and
   * sampler settings, queues it via /prompt, then polls until the saved
   * image can be downloaded.
   * @throws Error when queuing fails or polling times out.
   */
  async image(options) {
    const start = Date.now();
    const width = options.width ?? 1024;
    const height = options.height ?? 1024;
    const graph = structuredClone(DEFAULT_TXT2IMG_WORKFLOW);
    graph["6"].inputs.text = options.prompt;
    graph["7"].inputs.text = options.negativePrompt ?? "";
    graph["5"].inputs.width = width;
    graph["5"].inputs.height = height;
    const sampler = graph["3"].inputs;
    if (options.seed !== void 0) sampler.seed = options.seed;
    if (options.steps !== void 0) sampler.steps = options.steps;
    if (options.guidanceScale !== void 0) sampler.cfg = options.guidanceScale;
    const queueRes = await fetch(`${this.baseUrl}/prompt`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ prompt: graph })
    });
    if (!queueRes.ok) throw new Error(`ComfyUI queue failed: ${queueRes.status}`);
    const { prompt_id } = await queueRes.json();
    const imageData = await this.pollForResult(prompt_id);
    return {
      buffer: Buffer.from(imageData),
      provider: "comfyui",
      model: options.model ?? "comfyui-txt2img",
      modality: "image",
      latencyMs: Date.now() - start,
      usage: { cost: 0, unit: "free" },
      media: {
        width,
        height,
        format: "png"
      }
    };
  }
  /** Video is not wired up: it needs a user-supplied AnimateDiff workflow. */
  async video(_options) {
    throw new Error("ComfyUI video generation requires a configured AnimateDiff workflow");
  }
  /**
   * Polls /history for the queued prompt, sleeping 1s between attempts,
   * until some node output contains an image; downloads and returns its
   * bytes as an ArrayBuffer.
   * @throws Error when maxWaitMs elapses without an output image.
   */
  async pollForResult(promptId, maxWaitMs = 3e5) {
    const pause = () => new Promise((r) => setTimeout(r, 1e3));
    const deadline = Date.now() + maxWaitMs;
    while (Date.now() < deadline) {
      const res = await fetch(`${this.baseUrl}/history/${promptId}`);
      if (res.ok) {
        const history = await res.json();
        const entry = history[promptId];
        const outputs = entry?.outputs ? Object.values(entry.outputs) : [];
        for (const nodeOutput of outputs) {
          if (nodeOutput.images?.length > 0) {
            const [img] = nodeOutput.images;
            const imgRes = await fetch(
              `${this.baseUrl}/view?filename=${img.filename}&subfolder=${img.subfolder}&type=${img.type}`
            );
            return imgRes.arrayBuffer();
          }
        }
      }
      await pause();
    }
    throw new Error(`ComfyUI generation timed out after ${maxWaitMs}ms`);
  }
};
|
|
712
|
+
|
|
713
|
+
// src/providers/local-tts.ts
/**
 * Generic local text-to-speech provider speaking the OpenAI-compatible
 * speech API (e.g. a Piper or Kokoro server behind an HTTP wrapper).
 */
var LocalTTSProvider = class {
  id;
  name;
  modalities = ["tts"];
  isLocal = true;
  baseUrl;
  /** @param config { id, name, host, port } identifying the local server */
  constructor(config) {
    this.id = config.id;
    this.name = config.name;
    this.baseUrl = `${config.host}:${config.port}`;
  }
  /** Liveness probe against the server's /health endpoint. */
  async ping() {
    try {
      const res = await fetch(`${this.baseUrl}/health`);
      return res.ok;
    } catch {
      return false;
    }
  }
  /**
   * Lists available voices as model entries. Tries the native /voices
   * endpoint first and falls back to the OpenAI-style /v1/models listing.
   * BUGFIX: the fallback now also triggers when /voices answers with a
   * non-ok status or an unexpected payload shape — previously only a thrown
   * fetch error reached it, so OpenAI-style servers returned no models.
   */
  async listModels(modality) {
    if (modality && modality !== "tts") return [];
    try {
      const voices = await this.fetchVoices();
      // Hoisted: the shared voice-id list used to be rebuilt inside the map
      // for every entry (accidental O(n^2)).
      const voiceIds = voices.map((v) => v.id);
      return voices.map((v) => ({
        id: v.id,
        provider: this.id,
        name: v.name ?? v.id,
        modality: "tts",
        local: true,
        cost: { price: 0, unit: "free" },
        capabilities: { voices: voiceIds }
      }));
    } catch {
      return [];
    }
  }
  // Fetches the raw voice list: native /voices first, OpenAI-style
  // /v1/models as fallback; [] when neither endpoint yields a usable list.
  async fetchVoices() {
    try {
      const res = await fetch(`${this.baseUrl}/voices`);
      if (res.ok) {
        const data = await res.json();
        if (Array.isArray(data)) return data;
      }
      // non-ok status or unexpected shape — fall through to the fallback
    } catch {
      // network error on /voices — try the fallback endpoint
    }
    const res = await fetch(`${this.baseUrl}/v1/models`);
    if (!res.ok) return [];
    const data = await res.json();
    return data.data ?? [];
  }
  /**
   * Synthesizes speech via the OpenAI-compatible /v1/audio/speech endpoint
   * and returns the raw audio bytes; local generation is free.
   * @throws Error when the server responds with a non-2xx status.
   */
  async speak(options) {
    const start = Date.now();
    const res = await fetch(`${this.baseUrl}/v1/audio/speech`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        model: options.model ?? "tts-1",
        input: options.text,
        voice: options.voice ?? "default",
        speed: options.speed ?? 1,
        response_format: options.format ?? "mp3"
      })
    });
    if (!res.ok) {
      throw new Error(`Local TTS failed: ${res.status} ${await res.text()}`);
    }
    const audioBuffer = Buffer.from(await res.arrayBuffer());
    return {
      buffer: audioBuffer,
      provider: this.id,
      model: options.model ?? options.voice ?? "default",
      modality: "tts",
      latencyMs: Date.now() - start,
      usage: {
        cost: 0,
        // input is measured in characters of source text.
        input: options.text.length,
        unit: "characters"
      },
      media: {
        format: options.format ?? "mp3"
      }
    };
  }
};
|
|
799
|
+
|
|
800
|
+
// src/providers/huggingface.ts
|
|
801
|
+
import { HfInference } from "@huggingface/inference";
|
|
802
|
+
/**
 * Provider adapter for the HuggingFace Inference API.
 * Supports image generation, text-to-speech, and chat completion through
 * the `HfInference` client; all advertised models are free-tier (cost 0).
 */
var HuggingFaceProvider = class {
  id = "huggingface";
  name = "HuggingFace Inference";
  modalities = ["image", "tts", "llm"];
  isLocal = false;
  client;
  /** @param token - HuggingFace access token forwarded to the SDK client. */
  constructor(token) {
    this.client = new HfInference(token);
  }
  /** Hosted endpoint is assumed reachable; always reports healthy. */
  async ping() {
    return true;
  }
  /**
   * List the curated model catalog, optionally narrowed to one modality.
   * Returns a fresh array on every call.
   */
  async listModels(modality) {
    const catalog = [
      {
        id: "stabilityai/stable-diffusion-xl-base-1.0",
        provider: "huggingface",
        name: "SDXL Base",
        modality: "image",
        local: false,
        cost: { price: 0, unit: "free" }
      },
      {
        id: "facebook/mms-tts-eng",
        provider: "huggingface",
        name: "MMS TTS English",
        modality: "tts",
        local: false,
        cost: { price: 0, unit: "free" }
      },
      {
        id: "meta-llama/Llama-3.1-8B-Instruct",
        provider: "huggingface",
        name: "Llama 3.1 8B",
        modality: "llm",
        local: false,
        cost: { price: 0, unit: "free" }
      }
    ];
    return modality ? catalog.filter((entry) => entry.modality === modality) : catalog;
  }
  /**
   * Chat completion. Defaults to Llama 3.1 8B when no model is given.
   * Token counts are taken from the API usage payload when present.
   */
  async chat(options) {
    const startedAt = Date.now();
    const model = options.model ?? "meta-llama/Llama-3.1-8B-Instruct";
    const response = await this.client.chatCompletion({
      model,
      messages: options.messages,
      temperature: options.temperature,
      max_tokens: options.maxTokens
    });
    const firstChoice = response.choices?.[0];
    const tokenUsage = response.usage;
    return {
      content: firstChoice?.message?.content ?? "",
      provider: "huggingface",
      model,
      modality: "llm",
      latencyMs: Date.now() - startedAt,
      usage: {
        cost: 0,
        input: tokenUsage?.prompt_tokens,
        output: tokenUsage?.completion_tokens,
        unit: "tokens"
      }
    };
  }
  /**
   * Text-to-image via `textToImage`; the blob result is converted to a
   * Node Buffer. Reported dimensions fall back to 1024x1024 when the
   * caller did not specify width/height.
   */
  async image(options) {
    const startedAt = Date.now();
    const model = options.model ?? "stabilityai/stable-diffusion-xl-base-1.0";
    const generationParams = {
      negative_prompt: options.negativePrompt,
      width: options.width,
      height: options.height,
      guidance_scale: options.guidanceScale,
      num_inference_steps: options.steps
    };
    const imageBlob = await this.client.textToImage(
      { model, inputs: options.prompt, parameters: generationParams },
      { outputType: "blob" }
    );
    const buffer = Buffer.from(await imageBlob.arrayBuffer());
    return {
      buffer,
      provider: "huggingface",
      model,
      modality: "image",
      latencyMs: Date.now() - startedAt,
      usage: { cost: 0, unit: "free" },
      media: {
        width: options.width ?? 1024,
        height: options.height ?? 1024,
        format: "png"
      }
    };
  }
  /**
   * Text-to-speech via `textToSpeech`; audio is returned as a WAV Buffer.
   * Usage is tracked by input character count.
   */
  async speak(options) {
    const startedAt = Date.now();
    const model = options.model ?? "facebook/mms-tts-eng";
    const audioBlob = await this.client.textToSpeech({
      model,
      inputs: options.text
    });
    const buffer = Buffer.from(await audioBlob.arrayBuffer());
    return {
      buffer,
      provider: "huggingface",
      model,
      modality: "tts",
      latencyMs: Date.now() - startedAt,
      usage: {
        cost: 0,
        input: options.text.length,
        unit: "characters"
      },
      media: { format: "wav" }
    };
  }
};
|
|
925
|
+
|
|
926
|
+
// src/noosphere.ts
|
|
927
|
+
/**
 * Facade over the provider registry: routes chat/stream/image/video/tts
 * requests to a resolved provider, retries and fails over on transient
 * errors, and records a usage event for every success or failure.
 * Bundled output of src/noosphere.ts.
 */
var Noosphere = class {
  config; // resolved configuration (resolveConfig merges user config with env/defaults)
  registry; // provider/model registry used for discovery and resolution
  tracker; // usage-event recorder; forwards to config.onUsage
  initialized = false; // lazy-init guard; flipped by init(), checked at every public entry point
  constructor(config = {}) {
    this.config = resolveConfig(config);
    this.registry = new Registry(this.config.discoveryCacheTTL);
    this.tracker = new UsageTracker(this.config.onUsage);
  }
  /** Register a custom provider adapter */
  registerProvider(provider) {
    this.registry.addProvider(provider);
  }
  // --- Generation Methods ---
  /**
   * Chat completion. Resolves an LLM provider (explicit provider > model
   * lookup > configured default), runs it with retry/failover, records
   * usage on success and an error event on failure before rethrowing.
   */
  async chat(options) {
    if (!this.initialized) await this.init();
    const provider = this.resolveProviderForModality("llm", options.provider, options.model);
    if (!provider.chat) {
      throw new NoosphereError(`Provider '${provider.id}' does not support chat`, {
        code: "INVALID_INPUT",
        provider: provider.id,
        modality: "llm"
      });
    }
    const start = Date.now();
    try {
      const result = await this.executeWithRetry(
        "llm",
        provider,
        () => provider.chat(options),
        // failover factory: only providers that also implement chat() are eligible
        (alt) => alt.chat ? () => alt.chat(options) : null
      );
      await this.trackUsage(result, options.metadata);
      return result;
    } catch (err) {
      await this.trackError("llm", provider.id, options.model, start, err, options.metadata);
      throw err;
    }
  }
  /**
   * Streaming chat. Returns synchronously; provider resolution and the
   * underlying stream are created lazily on first iteration (or on
   * result()). Usage is tracked when a "done" event carrying a result is
   * observed. NOTE: no retry/failover is applied to streams, and
   * trackError here passes Date.now() as the start time, so the recorded
   * latency is ~0 — presumably a known limitation; confirm.
   */
  stream(options) {
    const self = this;
    let innerStream; // provider stream, created once by ensureInit
    let finalResult; // cached "done" result shared with result()
    let providerRef; // resolved provider, cached across iterations
    const ensureInit = async () => {
      if (!self.initialized) await self.init();
      if (!providerRef) {
        providerRef = self.resolveProviderForModality("llm", options.provider, options.model);
        if (!providerRef.stream) {
          throw new NoosphereError(`Provider '${providerRef.id}' does not support streaming`, {
            code: "INVALID_INPUT",
            provider: providerRef.id,
            modality: "llm"
          });
        }
        innerStream = providerRef.stream(options);
      }
    };
    const wrappedIterator = {
      // Re-iterating reuses the same (possibly already consumed) provider stream.
      async *[Symbol.asyncIterator]() {
        await ensureInit();
        try {
          for await (const event of innerStream) {
            if (event.type === "done" && event.result) {
              finalResult = event.result;
              await self.trackUsage(event.result, options.metadata);
            }
            yield event;
          }
        } catch (err) {
          await self.trackError("llm", providerRef.id, options.model, Date.now(), err, options.metadata);
          throw err;
        }
      }
    };
    return {
      [Symbol.asyncIterator]: () => wrappedIterator[Symbol.asyncIterator](),
      // Drains the stream until a "done" event; returns the cached result
      // immediately if iteration already completed.
      result: async () => {
        if (finalResult) return finalResult;
        for await (const event of wrappedIterator) {
          if (event.type === "done" && event.result) return event.result;
          if (event.type === "error" && event.error) throw event.error;
        }
        throw new NoosphereError("Stream ended without result", {
          code: "GENERATION_FAILED",
          provider: providerRef?.id ?? "unknown",
          modality: "llm"
        });
      },
      // No-op until the inner stream exists (before first iteration).
      abort: () => innerStream?.abort()
    };
  }
  /** Image generation; same resolve/retry/track pipeline as chat(). */
  async image(options) {
    if (!this.initialized) await this.init();
    const provider = this.resolveProviderForModality("image", options.provider, options.model);
    if (!provider.image) {
      throw new NoosphereError(`Provider '${provider.id}' does not support image generation`, {
        code: "INVALID_INPUT",
        provider: provider.id,
        modality: "image"
      });
    }
    const start = Date.now();
    try {
      const result = await this.executeWithRetry(
        "image",
        provider,
        () => provider.image(options),
        (alt) => alt.image ? () => alt.image(options) : null
      );
      await this.trackUsage(result, options.metadata);
      return result;
    } catch (err) {
      await this.trackError("image", provider.id, options.model, start, err, options.metadata);
      throw err;
    }
  }
  /** Video generation; same resolve/retry/track pipeline as chat(). */
  async video(options) {
    if (!this.initialized) await this.init();
    const provider = this.resolveProviderForModality("video", options.provider, options.model);
    if (!provider.video) {
      throw new NoosphereError(`Provider '${provider.id}' does not support video generation`, {
        code: "INVALID_INPUT",
        provider: provider.id,
        modality: "video"
      });
    }
    const start = Date.now();
    try {
      const result = await this.executeWithRetry(
        "video",
        provider,
        () => provider.video(options),
        (alt) => alt.video ? () => alt.video(options) : null
      );
      await this.trackUsage(result, options.metadata);
      return result;
    } catch (err) {
      await this.trackError("video", provider.id, options.model, start, err, options.metadata);
      throw err;
    }
  }
  /** Text-to-speech; same resolve/retry/track pipeline as chat(). */
  async speak(options) {
    if (!this.initialized) await this.init();
    const provider = this.resolveProviderForModality("tts", options.provider, options.model);
    if (!provider.speak) {
      throw new NoosphereError(`Provider '${provider.id}' does not support TTS`, {
        code: "INVALID_INPUT",
        provider: provider.id,
        modality: "tts"
      });
    }
    const start = Date.now();
    try {
      const result = await this.executeWithRetry(
        "tts",
        provider,
        () => provider.speak(options),
        (alt) => alt.speak ? () => alt.speak(options) : null
      );
      await this.trackUsage(result, options.metadata);
      return result;
    } catch (err) {
      await this.trackError("tts", provider.id, options.model, start, err, options.metadata);
      throw err;
    }
  }
  // --- Discovery Methods ---
  /** Registered providers, optionally filtered by modality. */
  async getProviders(modality) {
    if (!this.initialized) await this.init();
    return this.registry.getProviderInfos(modality);
  }
  /** Known models across providers, optionally filtered by modality. */
  async getModels(modality) {
    if (!this.initialized) await this.init();
    return this.registry.getModels(modality);
  }
  /** Look up one model by provider id and model id. */
  async getModel(provider, modelId) {
    if (!this.initialized) await this.init();
    return this.registry.getModel(provider, modelId);
  }
  /** Force a refresh of every provider's model list. */
  async syncModels() {
    if (!this.initialized) await this.init();
    return this.registry.syncAll();
  }
  // --- Tracking Methods ---
  /** Aggregated usage summary from the tracker (synchronous; no init needed). */
  getUsage(options) {
    return this.tracker.getSummary(options);
  }
  // --- Lifecycle ---
  /**
   * Dispose every provider that supports it (sequentially), then clear
   * the registry cache and recorded usage events.
   */
  async dispose() {
    for (const provider of this.registry.getAllProviders()) {
      if (provider.dispose) {
        await provider.dispose();
      }
    }
    this.registry.clearCache();
    this.tracker.clear();
  }
  // --- Internal ---
  /**
   * One-time lazy initialization: registers cloud providers based on
   * configured API keys, then probes configured local services (2s
   * timeout each, in parallel) and registers the reachable ones.
   * NOTE(review): HEAD's config defines `ollama` local defaults but no
   * Ollama provider is registered here — confirm whether intentional.
   */
  async init() {
    if (this.initialized) return;
    this.initialized = true; // set first so concurrent callers don't re-enter
    const { keys, local, autoDetectLocal } = this.config;
    const llmKeys = {
      openai: keys.openai,
      anthropic: keys.anthropic,
      google: keys.google,
      openrouter: keys.openrouter,
      groq: keys.groq,
      mistral: keys.mistral,
      xai: keys.xai
    };
    const hasAnyLLMKey = Object.values(llmKeys).some(Boolean);
    if (hasAnyLLMKey) {
      this.registry.addProvider(new PiAiProvider(llmKeys));
    }
    if (keys.fal) {
      this.registry.addProvider(new FalProvider(keys.fal));
    }
    if (keys.huggingface) {
      this.registry.addProvider(new HuggingFaceProvider(keys.huggingface));
    }
    if (autoDetectLocal) {
      const PING_TIMEOUT_MS = 2e3;
      // Best-effort reachability probe: any fetch/abort failure reads as "down".
      const pingUrl = async (url) => {
        try {
          const controller = new AbortController();
          const timer = setTimeout(() => controller.abort(), PING_TIMEOUT_MS);
          try {
            const res = await fetch(url, { signal: controller.signal });
            return res.ok;
          } finally {
            clearTimeout(timer);
          }
        } catch {
          return false;
        }
      };
      const comfyuiCfg = local["comfyui"];
      const piperCfg = local["piper"];
      const kokoroCfg = local["kokoro"];
      // allSettled: one unreachable service never blocks the others.
      await Promise.allSettled([
        // ComfyUI
        (async () => {
          if (comfyuiCfg?.enabled) {
            const ok = await pingUrl(`${comfyuiCfg.host}:${comfyuiCfg.port}/system_stats`);
            if (ok) {
              this.registry.addProvider(new ComfyUIProvider({ host: comfyuiCfg.host, port: comfyuiCfg.port }));
            }
          }
        })(),
        // Piper TTS
        (async () => {
          if (piperCfg?.enabled) {
            const ok = await pingUrl(`${piperCfg.host}:${piperCfg.port}/health`);
            if (ok) {
              this.registry.addProvider(new LocalTTSProvider({ id: "piper", name: "Piper TTS", host: piperCfg.host, port: piperCfg.port }));
            }
          }
        })(),
        // Kokoro TTS
        (async () => {
          if (kokoroCfg?.enabled) {
            const ok = await pingUrl(`${kokoroCfg.host}:${kokoroCfg.port}/health`);
            if (ok) {
              this.registry.addProvider(new LocalTTSProvider({ id: "kokoro", name: "Kokoro TTS", host: kokoroCfg.host, port: kokoroCfg.port }));
            }
          }
        })()
      ]);
    }
  }
  /**
   * Resolve a provider for a modality. Precedence: explicit provider id >
   * provider owning the given model id > configured default for the
   * modality > registry's own fallback. Throws NO_PROVIDER when nothing
   * matches.
   */
  resolveProviderForModality(modality, preferredId, modelId) {
    if (modelId && !preferredId) {
      const resolved = this.registry.resolveModel(modelId, modality);
      if (resolved) return resolved.provider;
    }
    if (!preferredId) {
      const defaultCfg = this.config.defaults[modality];
      if (defaultCfg) {
        preferredId = defaultCfg.provider;
      }
    }
    const provider = this.registry.resolveProvider(modality, preferredId);
    if (!provider) {
      throw new NoosphereError(
        `No provider available for modality '${modality}'${preferredId ? ` (requested: ${preferredId})` : ""}`,
        { code: "NO_PROVIDER", provider: preferredId ?? "none", modality }
      );
    }
    return provider;
  }
  /**
   * Run fn with exponential-backoff retry (backoffMs * 2^attempt). Errors
   * map to a NoosphereError code, or GENERATION_FAILED for foreign
   * errors. GENERATION_FAILED is always retried but never fails over;
   * other codes retry only if listed in retryableErrors, and those also
   * qualify for failover. Once retries are exhausted (or the error is
   * non-retryable), eligible alternate providers are tried once each,
   * their individual failures deliberately swallowed; the original error
   * is rethrown if none succeeds.
   */
  async executeWithRetry(modality, provider, fn, failoverFnFactory) {
    const { maxRetries, backoffMs, retryableErrors, failover } = this.config.retry;
    let lastError;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        return await fn();
      } catch (err) {
        lastError = err instanceof Error ? err : new Error(String(err));
        const isNoosphereErr = err instanceof NoosphereError;
        const code = isNoosphereErr ? err.code : "GENERATION_FAILED";
        const isRetryable = retryableErrors.includes(code) || code === "GENERATION_FAILED";
        const allowsFailover = code !== "GENERATION_FAILED" && retryableErrors.includes(code);
        if (!isRetryable || attempt === maxRetries) {
          if (failover && allowsFailover && failoverFnFactory) {
            const altProviders = this.registry.getAllProviders().filter((p) => p.id !== provider.id && p.modalities.includes(modality));
            for (const alt of altProviders) {
              try {
                const altFn = failoverFnFactory(alt);
                if (altFn) return await altFn();
              } catch {
              }
            }
          }
          break;
        }
        const delay = backoffMs * Math.pow(2, attempt);
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
    throw lastError ?? new NoosphereError("Generation failed", {
      code: "GENERATION_FAILED",
      provider: provider.id,
      modality
    });
  }
  /** Record a successful generation as a usage event. */
  async trackUsage(result, metadata) {
    const event = {
      modality: result.modality,
      provider: result.provider,
      model: result.model,
      cost: result.usage.cost,
      latencyMs: result.latencyMs,
      input: result.usage.input,
      output: result.usage.output,
      unit: result.usage.unit,
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      success: true,
      metadata
    };
    await this.tracker.record(event);
  }
  /** Record a failed generation (zero cost, latency measured from startMs). */
  async trackError(modality, provider, model, startMs, err, metadata) {
    const event = {
      modality,
      provider,
      model: model ?? "unknown",
      cost: 0,
      latencyMs: Date.now() - startMs,
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      success: false,
      error: err instanceof Error ? err.message : String(err),
      metadata
    };
    await this.tracker.record(event);
  }
};
|
|
1286
|
+
export {
|
|
1287
|
+
Noosphere,
|
|
1288
|
+
NoosphereError
|
|
1289
|
+
};
|
|
1290
|
+
//# sourceMappingURL=index.js.map
|