@node-llm/core 0.7.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +67 -62
- package/dist/chat/Chat.d.ts +3 -2
- package/dist/chat/Chat.d.ts.map +1 -1
- package/dist/chat/Chat.js +4 -4
- package/dist/chat/ChatStream.d.ts +25 -0
- package/dist/chat/ChatStream.d.ts.map +1 -0
- package/dist/chat/ChatStream.js +93 -0
- package/dist/config.d.ts +6 -2
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +3 -0
- package/dist/embedding/Embedding.d.ts +1 -1
- package/dist/embedding/Embedding.d.ts.map +1 -1
- package/dist/errors/index.d.ts +22 -0
- package/dist/errors/index.d.ts.map +1 -1
- package/dist/errors/index.js +32 -0
- package/dist/index.d.ts +5 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -1
- package/dist/llm.d.ts +3 -1
- package/dist/llm.d.ts.map +1 -1
- package/dist/llm.js +26 -24
- package/dist/models/ModelRegistry.d.ts +3 -2
- package/dist/models/ModelRegistry.d.ts.map +1 -1
- package/dist/models/ModelRegistry.js +15 -3
- package/dist/models/models.d.ts +729 -60
- package/dist/models/models.d.ts.map +1 -1
- package/dist/models/models.js +24809 -2410
- package/dist/models/types.d.ts +3 -3
- package/dist/models/types.d.ts.map +1 -1
- package/dist/models/types.js +3 -0
- package/dist/providers/BaseProvider.d.ts +21 -0
- package/dist/providers/BaseProvider.d.ts.map +1 -0
- package/dist/providers/BaseProvider.js +28 -0
- package/dist/providers/Provider.d.ts +19 -1
- package/dist/providers/Provider.d.ts.map +1 -1
- package/dist/providers/anthropic/AnthropicProvider.d.ts +6 -7
- package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
- package/dist/providers/anthropic/AnthropicProvider.js +16 -13
- package/dist/providers/anthropic/Streaming.d.ts +1 -1
- package/dist/providers/anthropic/Streaming.d.ts.map +1 -1
- package/dist/providers/anthropic/Streaming.js +80 -54
- package/dist/providers/deepseek/Capabilities.js +1 -1
- package/dist/providers/deepseek/DeepSeekProvider.d.ts +5 -1
- package/dist/providers/deepseek/DeepSeekProvider.d.ts.map +1 -1
- package/dist/providers/deepseek/DeepSeekProvider.js +15 -1
- package/dist/providers/deepseek/Streaming.d.ts +1 -1
- package/dist/providers/deepseek/Streaming.d.ts.map +1 -1
- package/dist/providers/deepseek/Streaming.js +80 -48
- package/dist/providers/gemini/Capabilities.d.ts.map +1 -1
- package/dist/providers/gemini/Embeddings.d.ts +1 -1
- package/dist/providers/gemini/Embeddings.d.ts.map +1 -1
- package/dist/providers/gemini/GeminiProvider.d.ts +6 -4
- package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
- package/dist/providers/gemini/GeminiProvider.js +14 -4
- package/dist/providers/gemini/Streaming.d.ts +1 -1
- package/dist/providers/gemini/Streaming.d.ts.map +1 -1
- package/dist/providers/gemini/Streaming.js +62 -39
- package/dist/providers/ollama/Capabilities.d.ts +13 -0
- package/dist/providers/ollama/Capabilities.d.ts.map +1 -0
- package/dist/providers/ollama/Capabilities.js +54 -0
- package/dist/providers/ollama/Embedding.d.ts +6 -0
- package/dist/providers/ollama/Embedding.d.ts.map +1 -0
- package/dist/providers/ollama/Embedding.js +12 -0
- package/dist/providers/ollama/Models.d.ts +8 -0
- package/dist/providers/ollama/Models.d.ts.map +1 -0
- package/dist/providers/ollama/Models.js +31 -0
- package/dist/providers/ollama/OllamaProvider.d.ts +9 -0
- package/dist/providers/ollama/OllamaProvider.d.ts.map +1 -0
- package/dist/providers/ollama/OllamaProvider.js +31 -0
- package/dist/providers/ollama/index.d.ts +9 -0
- package/dist/providers/ollama/index.d.ts.map +1 -0
- package/dist/providers/ollama/index.js +17 -0
- package/dist/providers/openai/Capabilities.d.ts +1 -1
- package/dist/providers/openai/Capabilities.d.ts.map +1 -1
- package/dist/providers/openai/Capabilities.js +4 -2
- package/dist/providers/openai/Embedding.d.ts +5 -3
- package/dist/providers/openai/Embedding.d.ts.map +1 -1
- package/dist/providers/openai/Embedding.js +13 -8
- package/dist/providers/openai/Models.d.ts +12 -2
- package/dist/providers/openai/Models.d.ts.map +1 -1
- package/dist/providers/openai/Models.js +50 -16
- package/dist/providers/openai/OpenAIProvider.d.ts +22 -12
- package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
- package/dist/providers/openai/OpenAIProvider.js +16 -2
- package/dist/providers/openai/Streaming.d.ts +1 -1
- package/dist/providers/openai/Streaming.d.ts.map +1 -1
- package/dist/providers/openai/Streaming.js +75 -43
- package/dist/providers/openrouter/Capabilities.d.ts +13 -0
- package/dist/providers/openrouter/Capabilities.d.ts.map +1 -0
- package/dist/providers/openrouter/Capabilities.js +67 -0
- package/dist/providers/openrouter/Models.d.ts +11 -0
- package/dist/providers/openrouter/Models.d.ts.map +1 -0
- package/dist/providers/openrouter/Models.js +88 -0
- package/dist/providers/openrouter/OpenRouterProvider.d.ts +21 -0
- package/dist/providers/openrouter/OpenRouterProvider.d.ts.map +1 -0
- package/dist/providers/openrouter/OpenRouterProvider.js +24 -0
- package/dist/providers/openrouter/index.d.ts +11 -0
- package/dist/providers/openrouter/index.d.ts.map +1 -0
- package/dist/providers/openrouter/index.js +26 -0
- package/dist/providers/registry.d.ts +11 -1
- package/dist/providers/registry.d.ts.map +1 -1
- package/dist/providers/registry.js +14 -0
- package/dist/streaming/Stream.d.ts +29 -0
- package/dist/streaming/Stream.d.ts.map +1 -0
- package/dist/streaming/Stream.js +67 -0
- package/dist/utils/FileLoader.d.ts.map +1 -1
- package/dist/utils/FileLoader.js +34 -3
- package/dist/utils/logger.d.ts +13 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +24 -0
- package/package.json +1 -1
- package/dist/chat/Stream.d.ts +0 -21
- package/dist/chat/Stream.d.ts.map +0 -1
- package/dist/chat/Stream.js +0 -73
- package/dist/providers/Embedding.d.ts +0 -20
- package/dist/providers/Embedding.d.ts.map +0 -1
- package/dist/providers/Embedding.js +0 -1

package/dist/providers/deepseek/Streaming.js
@@ -1,3 +1,4 @@
+import { APIError } from "../../errors/index.js";
 export class DeepSeekStreaming {
     baseUrl;
     apiKey;
@@ -5,7 +6,8 @@ export class DeepSeekStreaming {
         this.baseUrl = baseUrl;
         this.apiKey = apiKey;
     }
-    async *execute(request) {
+    async *execute(request, controller) {
+        const abortController = controller || new AbortController();
         const { model, messages, tools, max_tokens, response_format, headers, ...rest } = request;
         const body = {
             model,
@@ -19,55 +21,85 @@ export class DeepSeekStreaming {
             body.tools = tools;
         if (response_format)
             body.response_format = response_format;
-    … (44 removed lines not shown in this diff view)
+        let done = false;
+        try {
+            const response = await fetch(`${this.baseUrl}/chat/completions`, {
+                method: "POST",
+                headers: {
+                    "Authorization": `Bearer ${this.apiKey}`,
+                    "Content-Type": "application/json",
+                    ...request.headers,
+                },
+                body: JSON.stringify(body),
+                signal: abortController.signal,
+            });
+            if (!response.ok) {
+                const errorText = await response.text();
+                throw new Error(`DeepSeek API error: ${response.status} - ${errorText}`);
+            }
+            if (!response.body) {
+                throw new Error("No response body for streaming");
+            }
+            const reader = response.body.getReader();
+            const decoder = new TextDecoder();
+            let buffer = "";
+            while (true) {
+                const { value, done: readerDone } = await reader.read();
+                if (readerDone)
+                    break;
+                const chunk = decoder.decode(value, { stream: true });
+                buffer += chunk;
+                const lines = buffer.split("\n\n");
+                buffer = lines.pop() || "";
+                for (const line of lines) {
+                    let trimmed = line.trim();
+                    // Handle carriage returns
+                    if (trimmed.endsWith('\r')) {
+                        trimmed = trimmed.substring(0, trimmed.length - 1);
+                    }
+                    if (!trimmed.startsWith("data: "))
+                        continue;
+                    const data = trimmed.replace("data: ", "").trim();
+                    if (data === "[DONE]") {
+                        done = true;
+                        return;
+                    }
+                    try {
+                        const json = JSON.parse(data);
+                        // Check for errors in the data
+                        if (json.error) {
+                            throw new APIError("DeepSeek", response.status, json.error.message || "Stream error");
+                        }
+                        const deltaContent = json.choices?.[0]?.delta?.content;
+                        const deltaReasoning = json.choices?.[0]?.delta?.reasoning_content;
+                        if (deltaContent || deltaReasoning) {
+                            yield {
+                                content: deltaContent || "",
+                                reasoning: deltaReasoning || ""
+                            };
+                        }
+                    }
+                    catch (e) {
+                        // Re-throw APIError
+                        if (e instanceof APIError)
+                            throw e;
+                        // Ignore other parse errors
                     }
                 }
-    … (3 removed lines not shown in this diff view)
+            }
+            done = true;
+        }
+        catch (e) {
+            // Graceful exit on abort
+            if (e instanceof Error && e.name === 'AbortError') {
+                return;
+            }
+            throw e;
+        }
+        finally {
+            // Cleanup: abort if user breaks early
+            if (!done) {
+                abortController.abort();
+            }
         }
     }
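
The streaming handlers rewritten in this release (DeepSeek above, plus the Gemini and OpenAI equivalents further down) all gain the same shape: execute() takes an optional AbortController, wires its signal into fetch, treats AbortError as a normal end of stream, and aborts in a finally block when the consumer stops iterating before [DONE]. A minimal consumer sketch in TypeScript; the deep import path, base URL, and any request fields beyond model/messages are illustrative assumptions, not documented API:

// Sketch: driving the new execute(request, controller) generator.
import { DeepSeekStreaming } from "@node-llm/core/dist/providers/deepseek/Streaming.js"; // assumed path

const streaming = new DeepSeekStreaming("https://api.deepseek.com", process.env.DEEPSEEK_API_KEY ?? "");
const controller = new AbortController();

// External cancellation: abort() simply ends the iteration, because execute()
// swallows the resulting AbortError.
const timer = setTimeout(() => controller.abort(), 30_000);

for await (const chunk of streaming.execute(
  { model: "deepseek-chat", messages: [{ role: "user", content: "Say hi" }] } as any, // request shape abbreviated
  controller,
)) {
  process.stdout.write(chunk.content ?? "");
  // Breaking out early is also safe: the finally block calls abort() on the
  // controller so the underlying HTTP request is torn down.
}
clearTimeout(timer);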

package/dist/providers/gemini/Capabilities.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Capabilities.ts"],"names":[],"mappings":"AAEA,qBAAa,YAAY;IACvB,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAuBvD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAoBzD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAWzD,MAAM,CAAC,0BAA0B,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAI3D,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIjD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQnD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAInD,MAAM,CAAC,oBAAoB,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAI/F,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,MAAM,EAAE,CAAA;KAAE;IAY5E,MAAM,CAAC,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;IAUjD,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,MAAM;;
+
{"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Capabilities.ts"],"names":[],"mappings":"AAEA,qBAAa,YAAY;IACvB,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAuBvD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAoBzD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAWzD,MAAM,CAAC,0BAA0B,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAI3D,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIjD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQnD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAInD,MAAM,CAAC,oBAAoB,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAI/F,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,MAAM,EAAE,CAAA;KAAE;IAY5E,MAAM,CAAC,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;IAUjD,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,MAAM;;oBArIyB,CAAC;iCAA6B,CAAC;wCACrF,CAAD;kCAEM,CAAC;4CACO,CAAC;;iBAEA,CAAC;iCACf,CAAC;kCAEU,CAAC;;;;oBACD,CAAC;qBAET,CAAC;sBACH,CAAC;;iBAEI,CAAC;qBAGP,CAAC;sBAAwB,CAAC;;;;oBACD,CAAC;iCAA6B,CAAC;kCACjC,CAAC;;;;IA8I5B,OAAO,CAAC,MAAM,CAAC,gBAAgB;CAGhC"}

package/dist/providers/gemini/Embeddings.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"Embeddings.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Embeddings.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,
+
{"version":3,"file":"Embeddings.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Embeddings.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AAIrE,qBAAa,gBAAgB;IACf,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,iBAAiB,CAAC;CAwCrE"}

package/dist/providers/gemini/GeminiProvider.d.ts
@@ -1,10 +1,10 @@
-import { Provider, ChatRequest, ChatResponse, ModelInfo, ChatChunk, ImageRequest, ImageResponse, TranscriptionRequest, TranscriptionResponse,
-import {
+import { Provider, ChatRequest, ChatResponse, ModelInfo, ChatChunk, ImageRequest, ImageResponse, TranscriptionRequest, TranscriptionResponse, EmbeddingRequest, EmbeddingResponse } from "../Provider.js";
+import { BaseProvider } from "../BaseProvider.js";
 export interface GeminiProviderOptions {
     apiKey: string;
     baseUrl?: string;
 }
-export declare class GeminiProvider implements Provider {
+export declare class GeminiProvider extends BaseProvider implements Provider {
     private readonly options;
     private readonly baseUrl;
     private readonly chatHandler;
@@ -25,12 +25,14 @@ export declare class GeminiProvider implements Provider {
         getContextWindow: (model: string) => number | null;
     };
     constructor(options: GeminiProviderOptions);
+    apiBase(): string;
+    headers(): Record<string, string>;
+    protected providerName(): string;
     chat(request: ChatRequest): Promise<ChatResponse>;
     stream(request: ChatRequest): AsyncGenerator<ChatChunk>;
     listModels(): Promise<ModelInfo[]>;
     paint(request: ImageRequest): Promise<ImageResponse>;
     embed(request: EmbeddingRequest): Promise<EmbeddingResponse>;
     transcribe(request: TranscriptionRequest): Promise<TranscriptionResponse>;
-    moderate(_request: ModerationRequest): Promise<ModerationResponse>;
 }
 //# sourceMappingURL=GeminiProvider.d.ts.map

package/dist/providers/gemini/GeminiProvider.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"GeminiProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/GeminiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,WAAW,EACX,YAAY,EACZ,SAAS,EACT,SAAS,EACT,YAAY,EACZ,aAAa,EACb,oBAAoB,EACpB,qBAAqB,
+
{"version":3,"file":"GeminiProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/GeminiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,WAAW,EACX,YAAY,EACZ,SAAS,EACT,SAAS,EACT,YAAY,EACZ,aAAa,EACb,oBAAoB,EACpB,qBAAqB,EAGrB,gBAAgB,EAChB,iBAAiB,EAClB,MAAM,gBAAgB,CAAC;AACxB,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AASlD,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,cAAe,SAAQ,YAAa,YAAW,QAAQ;IAqBtD,OAAO,CAAC,QAAQ,CAAC,OAAO;IApBpC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAkB;IACnD,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAe;IAC7C,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAc;IAC3C,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAmB;IACpD,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAsB;IAEpD,YAAY;gCACO,MAAM;+BACP,MAAM;0CACK,MAAM;oCACZ,MAAM;yCACD,MAAM;uCACR,MAAM;oCACT,MAAM;oCACN,MAAM;kCACR,MAAM;MAChC;gBAE2B,OAAO,EAAE,qBAAqB;IAWpD,OAAO,IAAI,MAAM;IAIjB,OAAO,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAMxC,SAAS,CAAC,YAAY,IAAI,MAAM;IAI1B,IAAI,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;IAIhD,MAAM,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc,CAAC,SAAS,CAAC;IAIxD,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAIlC,KAAK,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;IAIpD,KAAK,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAI5D,UAAU,CAAC,OAAO,EAAE,oBAAoB,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAKhF"}

package/dist/providers/gemini/GeminiProvider.js
@@ -1,3 +1,4 @@
+import { BaseProvider } from "../BaseProvider.js";
 import { Capabilities } from "./Capabilities.js";
 import { GeminiChat } from "./Chat.js";
 import { GeminiStreaming } from "./Streaming.js";
@@ -5,7 +6,7 @@ import { GeminiModels } from "./Models.js";
 import { GeminiImage } from "./Image.js";
 import { GeminiEmbeddings } from "./Embeddings.js";
 import { GeminiTranscription } from "./Transcription.js";
-export class GeminiProvider {
+export class GeminiProvider extends BaseProvider {
     options;
     baseUrl;
     chatHandler;
@@ -26,6 +27,7 @@ export class GeminiProvider {
         getContextWindow: (model) => Capabilities.getContextWindow(model),
     };
     constructor(options) {
+        super();
         this.options = options;
         this.baseUrl = options.baseUrl ?? "https://generativelanguage.googleapis.com/v1beta";
         this.chatHandler = new GeminiChat(this.baseUrl, options.apiKey);
@@ -35,6 +37,17 @@ export class GeminiProvider {
         this.embeddingHandler = new GeminiEmbeddings(this.baseUrl, options.apiKey);
         this.transcriptionHandler = new GeminiTranscription(this.baseUrl, options.apiKey);
     }
+    apiBase() {
+        return this.baseUrl;
+    }
+    headers() {
+        return {
+            "Content-Type": "application/json",
+        };
+    }
+    providerName() {
+        return "Gemini";
+    }
     async chat(request) {
         return this.chatHandler.execute(request);
     }
@@ -53,7 +66,4 @@ export class GeminiProvider {
     async transcribe(request) {
         return this.transcriptionHandler.execute(request);
     }
-    async moderate(_request) {
-        throw new Error("Gemini doesn't support moderation");
-    }
 }
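
Two consumer-visible changes in GeminiProvider for 1.0.0: it now extends the new BaseProvider (see package/dist/providers/BaseProvider.js in the file list) and exposes apiBase(), headers(), and a protected providerName(), while the old moderate() stub that always threw has been removed from both the .js and the .d.ts. A small sketch of what that looks like from calling code; the deep import path is illustrative, and BaseProvider's own behaviour for unsupported operations is not visible in this diff:

// Sketch only; the package's public entry point may re-export GeminiProvider differently.
import { GeminiProvider } from "@node-llm/core/dist/providers/gemini/GeminiProvider.js";

const gemini = new GeminiProvider({ apiKey: process.env.GEMINI_API_KEY ?? "" });

// New in 1.0.0: the resolved base URL and default headers are exposed directly.
console.log(gemini.apiBase());   // "https://generativelanguage.googleapis.com/v1beta" unless baseUrl was overridden
console.log(gemini.headers());   // { "Content-Type": "application/json" }

// gemini.moderate(...) is no longer declared on GeminiProvider itself; whether
// BaseProvider declares or implements it is not shown in this diff.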

package/dist/providers/gemini/Streaming.d.ts
@@ -3,7 +3,7 @@ export declare class GeminiStreaming {
     private readonly baseUrl;
     private readonly apiKey;
     constructor(baseUrl: string, apiKey: string);
-    execute(request: ChatRequest): AsyncGenerator<ChatChunk>;
+    execute(request: ChatRequest, controller?: AbortController): AsyncGenerator<ChatChunk>;
     private sanitizeSchema;
 }
 //# sourceMappingURL=Streaming.d.ts.map

package/dist/providers/gemini/Streaming.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"Streaming.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Streaming.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAMxD,qBAAa,eAAe;IACd,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEtE,OAAO,
+
{"version":3,"file":"Streaming.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Streaming.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAMxD,qBAAa,eAAe;IACd,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEtE,OAAO,CACZ,OAAO,EAAE,WAAW,EACpB,UAAU,CAAC,EAAE,eAAe,GAC3B,cAAc,CAAC,SAAS,CAAC;IAuG5B,OAAO,CAAC,cAAc;CAwBvB"}

package/dist/providers/gemini/Streaming.js
@@ -8,7 +8,8 @@ export class GeminiStreaming {
         this.baseUrl = baseUrl;
         this.apiKey = apiKey;
     }
-    async *execute(request) {
+    async *execute(request, controller) {
+        const abortController = controller || new AbortController();
         const temperature = Capabilities.normalizeTemperature(request.temperature, request.model);
         const url = `${this.baseUrl}/models/${request.model}:streamGenerateContent?alt=sse&key=${this.apiKey}`;
         const { contents, systemInstructionParts } = await GeminiChatUtils.convertMessages(request.messages);
@@ -32,49 +33,71 @@ export class GeminiStreaming {
         if (systemInstructionParts.length > 0) {
             payload.systemInstruction = { parts: systemInstructionParts };
         }
-    … (35 removed lines not shown in this diff view)
+        let done = false;
+        try {
+            const response = await fetch(url, {
+                method: "POST",
+                headers: {
+                    "Content-Type": "application/json",
+                },
+                body: JSON.stringify(payload),
+                signal: abortController.signal,
+            });
+            if (!response.ok) {
+                await handleGeminiError(response, request.model);
+            }
+            if (!response.body) {
+                throw new Error("No response body for streaming");
+            }
+            const reader = response.body.getReader();
+            const decoder = new TextDecoder();
+            let buffer = "";
+            while (true) {
+                const { value, done: readerDone } = await reader.read();
+                if (readerDone)
+                    break;
+                buffer += decoder.decode(value, { stream: true });
+                let lineEnd;
+                while ((lineEnd = buffer.indexOf("\n")) !== -1) {
+                    let line = buffer.substring(0, lineEnd).trim();
+                    buffer = buffer.substring(lineEnd + 1);
+                    // Handle carriage returns
+                    if (line.endsWith('\r')) {
+                        line = line.substring(0, line.length - 1);
+                    }
+                    if (line.startsWith("data: ")) {
+                        const data = line.substring(6).trim();
+                        if (!data)
+                            continue;
+                        try {
+                            const json = JSON.parse(data);
+                            const parts = json.candidates?.[0]?.content?.parts || [];
+                            for (const part of parts) {
+                                if (part.text) {
+                                    yield { content: part.text };
+                                }
                             }
                         }
-    … (3 removed lines not shown in this diff view)
+                        catch (e) {
+                            // Ignore parse errors
+                        }
                     }
                 }
             }
+            done = true;
+        }
+        catch (e) {
+            // Graceful exit on abort
+            if (e instanceof Error && e.name === 'AbortError') {
+                return;
+            }
+            throw e;
+        }
+        finally {
+            // Cleanup: abort if user breaks early
+            if (!done) {
+                abortController.abort();
+            }
         }
     }
     sanitizeSchema(schema) {

package/dist/providers/ollama/Capabilities.d.ts
@@ -0,0 +1,13 @@
+export declare class OllamaCapabilities {
+    static findModel(modelId: string): import("../../models/types.js").Model | undefined;
+    static getContextWindow(modelId: string): number | null;
+    static supportsVision(modelId: string): boolean;
+    static supportsTools(modelId: string): boolean;
+    static supportsStructuredOutput(modelId: string): boolean;
+    static supportsEmbeddings(modelId: string): boolean;
+    static supportsReasoning(modelId: string): boolean;
+    static supportsImageGeneration(modelId: string): boolean;
+    static supportsTranscription(modelId: string): boolean;
+    static supportsModeration(modelId: string): boolean;
+}
+//# sourceMappingURL=Capabilities.d.ts.map

package/dist/providers/ollama/Capabilities.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/Capabilities.ts"],"names":[],"mappings":"AAEA,qBAAa,kBAAkB;IAC7B,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,MAAM;IAYhC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAKvD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAU/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAK9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKzD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKnD,MAAM,CAAC,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKlD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;CAIpD"}

package/dist/providers/ollama/Capabilities.js
@@ -0,0 +1,54 @@
+import { ModelRegistry } from "../../models/ModelRegistry.js";
+export class OllamaCapabilities {
+    static findModel(modelId) {
+        // Ollama specific: try exact match first, then strip tags
+        let model = ModelRegistry.find(modelId, "ollama");
+        if (!model && modelId?.includes(":")) {
+            const baseId = modelId.split(":")[0];
+            if (baseId) {
+                model = ModelRegistry.find(baseId, "ollama");
+            }
+        }
+        return model;
+    }
+    static getContextWindow(modelId) {
+        const model = this.findModel(modelId);
+        return model?.context_window || 8192;
+    }
+    static supportsVision(modelId) {
+        const model = this.findModel(modelId);
+        if (model) {
+            return model.modalities?.input?.includes("image") || model.capabilities?.includes("vision") || false;
+        }
+        // Fallback for custom models not in registry
+        return /vision|llava|moondream|bakllava/.test(modelId.toLowerCase());
+    }
+    static supportsTools(modelId) {
+        const model = this.findModel(modelId);
+        return model?.capabilities?.includes("tools") || false;
+    }
+    static supportsStructuredOutput(modelId) {
+        const model = this.findModel(modelId);
+        return model?.capabilities?.includes("structured_output") || false;
+    }
+    static supportsEmbeddings(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.output?.includes("embeddings") || model?.capabilities?.includes("embeddings") || false;
+    }
+    static supportsReasoning(modelId) {
+        const model = this.findModel(modelId);
+        return model?.capabilities?.includes("reasoning") || false;
+    }
+    static supportsImageGeneration(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.output?.includes("image") || false;
+    }
+    static supportsTranscription(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.input?.includes("audio") || false;
+    }
+    static supportsModeration(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.output?.includes("moderation") || false;
+    }
+}
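
OllamaCapabilities resolves a model by consulting the bundled ModelRegistry twice, first with the exact id and then with the ":tag" suffix stripped, and only falls back to name-based heuristics (or an 8192-token default context window) when the registry has no entry. A short behavioural sketch; whether a given id is present in the bundled registry is an assumption, so the comments describe the fallback path:

import { OllamaCapabilities } from "@node-llm/core/dist/providers/ollama/Capabilities.js"; // assumed path

// Lookup order: "llava:13b", then "llava". Even if neither is in the registry,
// the name-based fallback regex (/vision|llava|moondream|bakllava/) reports vision support.
OllamaCapabilities.supportsVision("llava:13b");

// Unregistered custom models fall back to the 8192-token default.
OllamaCapabilities.getContextWindow("my-finetune:q4_K_M"); // 8192 on a registry miss

// Tools, structured output, embeddings, etc. have no name-based fallback,
// so a registry miss simply means false.
OllamaCapabilities.supportsTools("my-finetune:q4_K_M");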

package/dist/providers/ollama/Embedding.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"Embedding.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/Embedding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AAGzD,qBAAa,eAAgB,SAAQ,eAAe;cAC/B,eAAe,IAAI,MAAM;cAIzB,aAAa,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CAKtD"}

package/dist/providers/ollama/Embedding.js
@@ -0,0 +1,12 @@
+import { OpenAIEmbedding } from "../openai/Embedding.js";
+import { OllamaCapabilities } from "./Capabilities.js";
+export class OllamaEmbedding extends OpenAIEmbedding {
+    getProviderName() {
+        return "ollama";
+    }
+    validateModel(model) {
+        if (!OllamaCapabilities.supportsEmbeddings(model)) {
+            throw new Error(`Model ${model} does not support embeddings.`);
+        }
+    }
+}

package/dist/providers/ollama/Models.d.ts
@@ -0,0 +1,8 @@
+import { OpenAIModels } from "../openai/Models.js";
+export declare class OllamaModels extends OpenAIModels {
+    protected getProviderName(): string;
+    protected formatDisplayName(modelId: string): string;
+    protected getContextWindow(modelId: string): number | null;
+    protected getCapabilities(modelId: string): string[];
+}
+//# sourceMappingURL=Models.d.ts.map

package/dist/providers/ollama/Models.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"Models.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/Models.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAInD,qBAAa,YAAa,SAAQ,YAAY;cACzB,eAAe,IAAI,MAAM;cAIzB,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;cAO1C,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;cAIhD,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;CAW9D"}

package/dist/providers/ollama/Models.js
@@ -0,0 +1,31 @@
+import { OpenAIModels } from "../openai/Models.js";
+import { OllamaCapabilities } from "./Capabilities.js";
+export class OllamaModels extends OpenAIModels {
+    getProviderName() {
+        return "ollama";
+    }
+    formatDisplayName(modelId) {
+        const model = OllamaCapabilities.findModel(modelId);
+        if (model?.name && model.name !== modelId)
+            return model.name;
+        const baseId = modelId.split(":")[0] || modelId;
+        return baseId.replace(/-/g, " ").replace(/\b\w/g, c => c.toUpperCase());
+    }
+    getContextWindow(modelId) {
+        return OllamaCapabilities.getContextWindow(modelId);
+    }
+    getCapabilities(modelId) {
+        const model = OllamaCapabilities.findModel(modelId);
+        if (model)
+            return model.capabilities;
+        // Fallback for custom pulled models
+        const caps = ["streaming"];
+        if (OllamaCapabilities.supportsTools(modelId))
+            caps.push("tools");
+        if (OllamaCapabilities.supportsVision(modelId))
+            caps.push("vision");
+        if (OllamaCapabilities.supportsEmbeddings(modelId))
+            caps.push("embeddings");
+        return caps;
+    }
+}
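
For models the registry does not know (locally pulled or custom fine-tunes), OllamaModels falls back to deriving a display name from the id (tag stripped, dashes to spaces, words capitalised) and to a minimal capability list built from the OllamaCapabilities heuristics. The helpers are protected, so this sketch exposes them through a subclass; the constructor arguments mirror how OllamaProvider instantiates the handler below, and the example assumes the id is absent from the bundled registry:

import { OllamaModels } from "@node-llm/core/dist/providers/ollama/Models.js"; // assumed path

// Sketch only: surface the protected helpers for inspection.
class InspectableOllamaModels extends OllamaModels {
  inspect(id: string) {
    return { name: this.formatDisplayName(id), capabilities: this.getCapabilities(id) };
  }
}

const models = new InspectableOllamaModels("http://localhost:11434/v1", "ollama");
console.log(models.inspect("my-llava-finetune:7b"));
// Expected for a registry miss:
//   name         -> "My Llava Finetune"
//   capabilities -> ["streaming", "vision"]  (vision comes from the llava name heuristic)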

package/dist/providers/ollama/OllamaProvider.d.ts
@@ -0,0 +1,9 @@
+import { OpenAIProvider } from "../openai/OpenAIProvider.js";
+export interface OllamaProviderOptions {
+    baseUrl?: string;
+}
+export declare class OllamaProvider extends OpenAIProvider {
+    constructor(options?: OllamaProviderOptions);
+    protected providerName(): string;
+}
+//# sourceMappingURL=OllamaProvider.d.ts.map

package/dist/providers/ollama/OllamaProvider.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAM7D,MAAM,WAAW,qBAAqB;IACpC,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,cAAe,SAAQ,cAAc;gBACpC,OAAO,GAAE,qBAA0B;IAwB/C,SAAS,CAAC,YAAY,IAAI,MAAM;CAGjC"}

package/dist/providers/ollama/OllamaProvider.js
@@ -0,0 +1,31 @@
+import { OpenAIProvider } from "../openai/OpenAIProvider.js";
+import { config } from "../../config.js";
+import { OllamaModels } from "./Models.js";
+import { OllamaEmbedding } from "./Embedding.js";
+import { OllamaCapabilities } from "./Capabilities.js";
+export class OllamaProvider extends OpenAIProvider {
+    constructor(options = {}) {
+        super({
+            apiKey: "ollama",
+            baseUrl: options.baseUrl || config.ollamaApiBase || "http://localhost:11434/v1"
+        });
+        // Override handlers with Ollama-specific ones
+        this.modelsHandler = new OllamaModels(this.baseUrl, this.options.apiKey);
+        this.embeddingHandler = new OllamaEmbedding(this.baseUrl, this.options.apiKey);
+        // Override capabilities to use OllamaCapabilities
+        this.capabilities = {
+            supportsVision: (modelId) => OllamaCapabilities.supportsVision(modelId),
+            supportsTools: (modelId) => OllamaCapabilities.supportsTools(modelId),
+            supportsStructuredOutput: (modelId) => OllamaCapabilities.supportsStructuredOutput(modelId),
+            supportsEmbeddings: (modelId) => OllamaCapabilities.supportsEmbeddings(modelId),
+            supportsImageGeneration: (modelId) => OllamaCapabilities.supportsImageGeneration(modelId),
+            supportsTranscription: (modelId) => OllamaCapabilities.supportsTranscription(modelId),
+            supportsModeration: (modelId) => OllamaCapabilities.supportsModeration(modelId),
+            supportsReasoning: (modelId) => OllamaCapabilities.supportsReasoning(modelId),
+            getContextWindow: (modelId) => OllamaCapabilities.getContextWindow(modelId),
+        };
+    }
+    providerName() {
+        return "Ollama";
+    }
+}
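
OllamaProvider is a thin wrapper over OpenAIProvider: it hard-codes the placeholder API key "ollama", resolves the base URL from the constructor option, then config.ollamaApiBase, then http://localhost:11434/v1, and swaps in the Ollama-specific model/embedding handlers and capability table. A usage sketch; the import path is illustrative:

import { OllamaProvider } from "@node-llm/core/dist/providers/ollama/index.js"; // assumed path

// Defaults: local Ollama at http://localhost:11434/v1 with the dummy "ollama" API key.
const local = new OllamaProvider();

// Or point at a remote host; setting config.ollamaApiBase would achieve the same.
const remote = new OllamaProvider({ baseUrl: "http://gpu-box.internal:11434/v1" });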

package/dist/providers/ollama/index.d.ts
@@ -0,0 +1,9 @@
+import { OllamaProvider } from "./OllamaProvider.js";
+export { OllamaProvider };
+/**
+ * Idempotent registration of the Ollama provider.
+ * Automatically called by LLM.configure({ provider: 'ollama' })
+ */
+export declare function registerOllamaProvider(): void;
+export declare const ensureOllamaRegistered: typeof registerOllamaProvider;
+//# sourceMappingURL=index.d.ts.map

package/dist/providers/ollama/index.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAErD,OAAO,EAAE,cAAc,EAAE,CAAC;AAI1B;;;GAGG;AACH,wBAAgB,sBAAsB,SAQrC;AAED,eAAO,MAAM,sBAAsB,+BAAyB,CAAC"}

package/dist/providers/ollama/index.js
@@ -0,0 +1,17 @@
+import { providerRegistry } from "../registry.js";
+import { OllamaProvider } from "./OllamaProvider.js";
+export { OllamaProvider };
+let registered = false;
+/**
+ * Idempotent registration of the Ollama provider.
+ * Automatically called by LLM.configure({ provider: 'ollama' })
+ */
+export function registerOllamaProvider() {
+    if (registered)
+        return;
+    providerRegistry.register("ollama", () => {
+        return new OllamaProvider();
+    });
+    registered = true;
+}
+export const ensureOllamaRegistered = registerOllamaProvider;
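
Registration is lazy and idempotent: registerOllamaProvider() stores a factory for "ollama" in the shared providerRegistry and a module-level flag makes repeat calls a no-op, with ensureOllamaRegistered exported as an alias. Per the doc comment, LLM.configure({ provider: 'ollama' }) triggers this automatically; that call is quoted from the comment rather than verified here, and the import path is illustrative:

import { registerOllamaProvider, ensureOllamaRegistered } from "@node-llm/core/dist/providers/ollama/index.js"; // assumed path

registerOllamaProvider();  // registers () => new OllamaProvider() under "ollama"
ensureOllamaRegistered();  // same function; safe to call again

// Per the inline doc comment, the usual entry point is:
// LLM.configure({ provider: "ollama" });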

package/dist/providers/openai/Capabilities.d.ts
@@ -10,7 +10,7 @@ export declare class Capabilities {
     static supportsTranscription(modelId: string): boolean;
     static supportsModeration(modelId: string): boolean;
     static supportsReasoning(modelId: string): boolean;
-    static getModelType(modelId: string): "
+    static getModelType(modelId: string): "embeddings" | "audio" | "moderation" | "image" | "chat" | "audio_transcription" | "audio_speech";
     static getModalities(modelId: string): {
         input: string[];
         output: string[];