@node-llm/core 0.6.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +89 -458
- package/dist/chat/Chat.d.ts.map +1 -1
- package/dist/chat/Chat.js +5 -3
- package/dist/chat/ChatResponse.d.ts +2 -1
- package/dist/chat/ChatResponse.d.ts.map +1 -1
- package/dist/chat/ChatResponse.js +3 -1
- package/dist/chat/Stream.d.ts.map +1 -1
- package/dist/chat/Stream.js +7 -1
- package/dist/config.d.ts +31 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +12 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -0
- package/dist/llm.d.ts +16 -9
- package/dist/llm.d.ts.map +1 -1
- package/dist/llm.js +41 -20
- package/dist/models/ModelRegistry.d.ts +0 -3
- package/dist/models/ModelRegistry.d.ts.map +1 -1
- package/dist/models/ModelRegistry.js +0 -3
- package/dist/models/models.d.ts.map +1 -1
- package/dist/models/models.js +386 -0
- package/dist/models/types.d.ts +2 -2
- package/dist/models/types.d.ts.map +1 -1
- package/dist/providers/Provider.d.ts +3 -0
- package/dist/providers/Provider.d.ts.map +1 -1
- package/dist/providers/anthropic/AnthropicProvider.d.ts +1 -0
- package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
- package/dist/providers/anthropic/AnthropicProvider.js +1 -0
- package/dist/providers/anthropic/index.d.ts.map +1 -1
- package/dist/providers/anthropic/index.js +3 -2
- package/dist/providers/deepseek/Capabilities.d.ts +14 -0
- package/dist/providers/deepseek/Capabilities.d.ts.map +1 -0
- package/dist/providers/deepseek/Capabilities.js +52 -0
- package/dist/providers/deepseek/Chat.d.ts +8 -0
- package/dist/providers/deepseek/Chat.d.ts.map +1 -0
- package/dist/providers/deepseek/Chat.js +89 -0
- package/dist/providers/deepseek/DeepSeekProvider.d.ts +28 -0
- package/dist/providers/deepseek/DeepSeekProvider.d.ts.map +1 -0
- package/dist/providers/deepseek/DeepSeekProvider.js +38 -0
- package/dist/providers/deepseek/Models.d.ts +8 -0
- package/dist/providers/deepseek/Models.d.ts.map +1 -0
- package/dist/providers/deepseek/Models.js +67 -0
- package/dist/providers/deepseek/Streaming.d.ts +8 -0
- package/dist/providers/deepseek/Streaming.d.ts.map +1 -0
- package/dist/providers/deepseek/Streaming.js +74 -0
- package/dist/providers/deepseek/index.d.ts +7 -0
- package/dist/providers/deepseek/index.d.ts.map +1 -0
- package/dist/providers/deepseek/index.js +22 -0
- package/dist/providers/gemini/Capabilities.d.ts.map +1 -1
- package/dist/providers/gemini/GeminiProvider.d.ts +1 -0
- package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
- package/dist/providers/gemini/GeminiProvider.js +1 -0
- package/dist/providers/gemini/index.d.ts.map +1 -1
- package/dist/providers/gemini/index.js +3 -2
- package/dist/providers/ollama/Capabilities.d.ts +13 -0
- package/dist/providers/ollama/Capabilities.d.ts.map +1 -0
- package/dist/providers/ollama/Capabilities.js +50 -0
- package/dist/providers/ollama/Embedding.d.ts +6 -0
- package/dist/providers/ollama/Embedding.d.ts.map +1 -0
- package/dist/providers/ollama/Embedding.js +12 -0
- package/dist/providers/ollama/Models.d.ts +8 -0
- package/dist/providers/ollama/Models.d.ts.map +1 -0
- package/dist/providers/ollama/Models.js +31 -0
- package/dist/providers/ollama/OllamaProvider.d.ts +8 -0
- package/dist/providers/ollama/OllamaProvider.d.ts.map +1 -0
- package/dist/providers/ollama/OllamaProvider.js +28 -0
- package/dist/providers/ollama/index.d.ts +9 -0
- package/dist/providers/ollama/index.d.ts.map +1 -0
- package/dist/providers/ollama/index.js +17 -0
- package/dist/providers/openai/Capabilities.d.ts +2 -1
- package/dist/providers/openai/Capabilities.d.ts.map +1 -1
- package/dist/providers/openai/Capabilities.js +10 -2
- package/dist/providers/openai/Embedding.d.ts +4 -2
- package/dist/providers/openai/Embedding.d.ts.map +1 -1
- package/dist/providers/openai/Embedding.js +13 -8
- package/dist/providers/openai/Models.d.ts +12 -2
- package/dist/providers/openai/Models.d.ts.map +1 -1
- package/dist/providers/openai/Models.js +50 -16
- package/dist/providers/openai/OpenAIProvider.d.ts +17 -9
- package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
- package/dist/providers/openai/OpenAIProvider.js +2 -1
- package/dist/providers/openai/index.d.ts.map +1 -1
- package/dist/providers/openai/index.js +4 -3
- package/package.json +1 -1

package/dist/providers/deepseek/Chat.js
@@ -0,0 +1,89 @@
import { ModelRegistry } from "../../models/ModelRegistry.js";
export class DeepSeekChat {
    baseUrl;
    apiKey;
    constructor(baseUrl, apiKey) {
        this.baseUrl = baseUrl;
        this.apiKey = apiKey;
    }
    async execute(request) {
        const { model, messages, tools, max_tokens, response_format, headers, ...rest } = request;
        const body = {
            model,
            messages,
            ...rest
        };
        if (max_tokens)
            body.max_tokens = max_tokens;
        if (tools && tools.length > 0)
            body.tools = tools;
        if (max_tokens)
            body.max_tokens = max_tokens;
        if (tools && tools.length > 0)
            body.tools = tools;
        // Handle structured output for DeepSeek
        if (response_format) {
            if (response_format.type === "json_schema") {
                body.response_format = { type: "json_object" };
                // Append schema instructions to the system prompt or convert to a new system message
                const schema = response_format.json_schema;
                const schemaString = JSON.stringify(schema.schema, null, 2);
                const instruction = `\n\nIMPORTANT: You must output strictly valid JSON conforming to the following schema:\n${schemaString}\n\nOutput only the JSON object.`;
                // Find system message or append to last user message
                const systemMessage = body.messages.find((m) => m.role === "system");
                if (systemMessage) {
                    systemMessage.content += instruction;
                }
                else {
                    // Insert system message at the beginning
                    body.messages.unshift({
                        role: "system",
                        content: "You are a helpful assistant." + instruction
                    });
                }
            }
            else {
                body.response_format = response_format;
            }
        }
        if (process.env.NODELLM_DEBUG === "true") {
            console.log(`[DeepSeek Request] ${JSON.stringify(body, null, 2)}`);
        }
        const response = await fetch(`${this.baseUrl}/chat/completions`, {
            method: "POST",
            headers: {
                "Authorization": `Bearer ${this.apiKey}`,
                "Content-Type": "application/json",
                ...request.headers,
            },
            body: JSON.stringify(body),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`DeepSeek API error: ${response.status} - ${errorText}`);
        }
        const json = (await response.json());
        const message = json.choices[0]?.message;
        const content = message?.content ?? null;
        const reasoning = message?.reasoning_content ?? null;
        const usage = {
            input_tokens: json.usage.prompt_tokens,
            output_tokens: json.usage.completion_tokens,
            total_tokens: json.usage.total_tokens,
        };
        const toolCalls = message?.tool_calls?.map((tc) => ({
            id: tc.id,
            type: tc.type,
            function: {
                name: tc.function.name,
                arguments: tc.function.arguments
            }
        }));
        if (!content && !reasoning && (!toolCalls || toolCalls.length === 0)) {
            throw new Error("DeepSeek returned empty response");
        }
        // deepseek cost calculation if needed, otherwise just return usage
        const calculatedUsage = ModelRegistry.calculateCost(usage, model, "deepseek");
        return { content, reasoning, usage: calculatedUsage, tool_calls: toolCalls };
    }
}
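
The `json_schema` branch above is easiest to read from the caller's side: the request is downgraded to `{ type: "json_object" }` and the schema text is appended to the system prompt. A minimal sketch, assuming `DeepSeekChat` is driven directly with a base URL and key (in practice `DeepSeekProvider` wires this up); the deep import path and the `name` field in the schema envelope are assumptions, not confirmed exports:

```ts
// Hypothetical usage of the structured-output path in DeepSeekChat.execute shown above.
import { DeepSeekChat } from "@node-llm/core/dist/providers/deepseek/Chat.js"; // assumed path

const chat = new DeepSeekChat("https://api.deepseek.com", process.env.DEEPSEEK_API_KEY ?? "");

const res = await chat.execute({
  model: "deepseek-chat",
  messages: [{ role: "user", content: "Name a city and its population." }],
  // "json_schema" becomes { type: "json_object" } and the schema is injected into the
  // system prompt, since the DeepSeek API has no native json_schema mode.
  response_format: {
    type: "json_schema",
    json_schema: {
      name: "city_info", // illustrative envelope field
      schema: {
        type: "object",
        properties: { city: { type: "string" }, population: { type: "number" } },
        required: ["city", "population"],
      },
    },
  },
});

console.log(JSON.parse(res.content ?? "{}"));
```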

package/dist/providers/deepseek/DeepSeekProvider.d.ts
@@ -0,0 +1,28 @@
import { Provider, ChatRequest, ChatResponse, ModelInfo, ChatChunk } from "../Provider.js";
export interface DeepSeekProviderOptions {
    apiKey: string;
    baseUrl?: string;
}
export declare class DeepSeekProvider implements Provider {
    private readonly options;
    private readonly baseUrl;
    private readonly chatHandler;
    private readonly streamingHandler;
    private readonly modelsHandler;
    capabilities: {
        supportsVision: (model: string) => boolean;
        supportsTools: (model: string) => boolean;
        supportsStructuredOutput: (model: string) => boolean;
        supportsEmbeddings: (model: string) => boolean;
        supportsImageGeneration: (model: string) => boolean;
        supportsTranscription: (model: string) => boolean;
        supportsModeration: (model: string) => boolean;
        supportsReasoning: (model: string) => boolean;
        getContextWindow: (model: string) => number | null;
    };
    constructor(options: DeepSeekProviderOptions);
    chat(request: ChatRequest): Promise<ChatResponse>;
    stream(request: ChatRequest): AsyncGenerator<ChatChunk>;
    listModels(): Promise<ModelInfo[]>;
}
//# sourceMappingURL=DeepSeekProvider.d.ts.map

package/dist/providers/deepseek/DeepSeekProvider.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"DeepSeekProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/deepseek/DeepSeekProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,YAAY,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAM3F,MAAM,WAAW,uBAAuB;IACtC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,gBAAiB,YAAW,QAAQ;IAkBnC,OAAO,CAAC,QAAQ,CAAC,OAAO;IAjBpC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAe;IAC3C,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAoB;IACrD,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAiB;IAExC,YAAY;gCACO,MAAM;+BACP,MAAM;0CACK,MAAM;oCACZ,MAAM;yCACD,MAAM;uCACR,MAAM;oCACT,MAAM;mCACP,MAAM;kCACP,MAAM;MAChC;gBAE2B,OAAO,EAAE,uBAAuB;IAOvD,IAAI,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;IAIhD,MAAM,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc,CAAC,SAAS,CAAC;IAIxD,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;CAGzC"}

package/dist/providers/deepseek/DeepSeekProvider.js
@@ -0,0 +1,38 @@
import { DeepSeekChat } from "./Chat.js";
import { DeepSeekModels } from "./Models.js";
import { DeepSeekStreaming } from "./Streaming.js";
import { Capabilities } from "./Capabilities.js";
export class DeepSeekProvider {
    options;
    baseUrl;
    chatHandler;
    streamingHandler;
    modelsHandler;
    capabilities = {
        supportsVision: (model) => Capabilities.supportsVision(model),
        supportsTools: (model) => Capabilities.supportsTools(model),
        supportsStructuredOutput: (model) => Capabilities.supportsStructuredOutput(model),
        supportsEmbeddings: (model) => Capabilities.supportsEmbeddings(model),
        supportsImageGeneration: (model) => Capabilities.supportsImageGeneration(model),
        supportsTranscription: (model) => Capabilities.supportsTranscription(model),
        supportsModeration: (model) => Capabilities.supportsModeration(model),
        supportsReasoning: (model) => Capabilities.supportsReasoning(model),
        getContextWindow: (model) => Capabilities.getContextWindow(model),
    };
    constructor(options) {
        this.options = options;
        this.baseUrl = options.baseUrl ?? "https://api.deepseek.com";
        this.chatHandler = new DeepSeekChat(this.baseUrl, options.apiKey);
        this.streamingHandler = new DeepSeekStreaming(this.baseUrl, options.apiKey);
        this.modelsHandler = new DeepSeekModels(this.baseUrl, options.apiKey);
    }
    async chat(request) {
        return this.chatHandler.execute(request);
    }
    async *stream(request) {
        yield* this.streamingHandler.execute(request);
    }
    async listModels() {
        return this.modelsHandler.execute();
    }
}
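
A minimal sketch of driving the new provider directly, assuming `DeepSeekProvider` is re-exported from the package root (the real export path may differ):

```ts
import { DeepSeekProvider } from "@node-llm/core"; // assumed root export

const provider = new DeepSeekProvider({ apiKey: process.env.DEEPSEEK_API_KEY ?? "" });

// chat() delegates to DeepSeekChat.execute and resolves to
// { content, reasoning, usage, tool_calls } as returned above.
const reply = await provider.chat({
  model: "deepseek-chat",
  messages: [{ role: "user", content: "One sentence on what this provider does." }],
});
console.log(reply.content, reply.usage);
```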

package/dist/providers/deepseek/Models.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"Models.d.ts","sourceRoot":"","sources":["../../../src/providers/deepseek/Models.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAe3C,qBAAa,cAAc;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;CA+DtC"}

package/dist/providers/deepseek/Models.js
@@ -0,0 +1,67 @@
import { Capabilities } from "./Capabilities.js";
import { ModelRegistry } from "../../models/ModelRegistry.js";
export class DeepSeekModels {
    baseUrl;
    apiKey;
    constructor(baseUrl, apiKey) {
        this.baseUrl = baseUrl;
        this.apiKey = apiKey;
    }
    async execute() {
        const response = await fetch(`${this.baseUrl}/models`, {
            method: "GET",
            headers: {
                "Authorization": `Bearer ${this.apiKey}`,
            },
        });
        if (!response.ok) {
            // Fallback to local registry
            const localModels = ModelRegistry.all()
                .filter(m => m.provider === "deepseek")
                .map(m => ({
                id: m.id,
                name: m.name,
                provider: "deepseek",
                family: m.family ?? "deepseek",
                context_window: m.context_window ?? null,
                max_output_tokens: m.max_output_tokens ?? null,
                modalities: m.modalities,
                capabilities: m.capabilities,
                pricing: m.pricing,
                metadata: m.metadata
            }));
            return localModels;
        }
        const json = (await response.json());
        const localRegistry = ModelRegistry.all().filter(m => m.provider === "deepseek");
        return json.data.map(m => {
            // Try to find in local registry for enriched data (pricing, limits)
            const local = localRegistry.find(l => l.id === m.id);
            if (local) {
                return {
                    id: local.id,
                    name: local.name,
                    provider: "deepseek", // Ensure literal type
                    family: local.family ?? "deepseek", // Handle missing family
                    context_window: local.context_window ?? null,
                    max_output_tokens: local.max_output_tokens ?? null,
                    modalities: local.modalities,
                    capabilities: local.capabilities,
                    pricing: local.pricing,
                    metadata: local.metadata
                };
            }
            return {
                id: m.id,
                name: m.id,
                provider: "deepseek",
                family: "deepseek",
                context_window: Capabilities.getContextWindow(m.id),
                max_output_tokens: Capabilities.getMaxOutputTokens(m.id),
                modalities: { input: ["text"], output: ["text"] },
                capabilities: Capabilities.getCapabilities(m.id),
                pricing: {}
            };
        });
    }
}
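
`listModels()` prefers the live `/models` endpoint and quietly falls back to the bundled registry entries when the request fails, so a sketch like the one below also works offline (same assumed root export as above):

```ts
import { DeepSeekProvider } from "@node-llm/core"; // assumed root export

const provider = new DeepSeekProvider({ apiKey: process.env.DEEPSEEK_API_KEY ?? "" });
for (const m of await provider.listModels()) {
  console.log(m.id, m.context_window, m.capabilities);
}
```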

package/dist/providers/deepseek/Streaming.d.ts
@@ -0,0 +1,8 @@
import { ChatRequest, ChatChunk } from "../Provider.js";
export declare class DeepSeekStreaming {
    private readonly baseUrl;
    private readonly apiKey;
    constructor(baseUrl: string, apiKey: string);
    execute(request: ChatRequest): AsyncGenerator<ChatChunk>;
}
//# sourceMappingURL=Streaming.d.ts.map

package/dist/providers/deepseek/Streaming.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"Streaming.d.ts","sourceRoot":"","sources":["../../../src/providers/deepseek/Streaming.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAExD,qBAAa,iBAAiB;IAChB,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEtE,OAAO,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc,CAAC,SAAS,CAAC;CAwEhE"}

package/dist/providers/deepseek/Streaming.js
@@ -0,0 +1,74 @@
export class DeepSeekStreaming {
    baseUrl;
    apiKey;
    constructor(baseUrl, apiKey) {
        this.baseUrl = baseUrl;
        this.apiKey = apiKey;
    }
    async *execute(request) {
        const { model, messages, tools, max_tokens, response_format, headers, ...rest } = request;
        const body = {
            model,
            messages,
            stream: true,
            ...rest
        };
        if (max_tokens)
            body.max_tokens = max_tokens;
        if (tools && tools.length > 0)
            body.tools = tools;
        if (response_format)
            body.response_format = response_format;
        const response = await fetch(`${this.baseUrl}/chat/completions`, {
            method: "POST",
            headers: {
                "Authorization": `Bearer ${this.apiKey}`,
                "Content-Type": "application/json",
                ...request.headers,
            },
            body: JSON.stringify(body),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`DeepSeek API error: ${response.status} - ${errorText}`);
        }
        if (!response.body) {
            throw new Error("No response body for streaming");
        }
        // @ts-ignore
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let buffer = "";
        while (true) {
            const { value, done } = await reader.read();
            if (done)
                break;
            const chunk = decoder.decode(value, { stream: true });
            buffer += chunk;
            const lines = buffer.split("\n\n");
            buffer = lines.pop() || "";
            for (const line of lines) {
                const trimmed = line.trim();
                if (!trimmed.startsWith("data: "))
                    continue;
                const data = trimmed.replace("data: ", "").trim();
                if (data === "[DONE]")
                    return;
                try {
                    const json = JSON.parse(data);
                    const deltaContent = json.choices?.[0]?.delta?.content;
                    const deltaReasoning = json.choices?.[0]?.delta?.reasoning_content;
                    if (deltaContent || deltaReasoning) {
                        yield {
                            content: deltaContent || "",
                            reasoning: deltaReasoning || ""
                        };
                    }
                }
                catch (e) {
                    // ignore
                }
            }
        }
    }
}
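
The streaming handler parses the SSE feed and yields `{ content, reasoning }` deltas until the server sends `[DONE]`. A consumption sketch, again assuming a root export and using DeepSeek's published `deepseek-reasoner` model id:

```ts
import { DeepSeekProvider } from "@node-llm/core"; // assumed root export

const provider = new DeepSeekProvider({ apiKey: process.env.DEEPSEEK_API_KEY ?? "" });

for await (const chunk of provider.stream({
  model: "deepseek-reasoner",
  messages: [{ role: "user", content: "Think step by step: 17 * 24 = ?" }],
})) {
  // reasoning_content deltas are surfaced separately from answer content
  if (chunk.reasoning) process.stdout.write(chunk.reasoning);
  if (chunk.content) process.stdout.write(chunk.content);
}
```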

package/dist/providers/deepseek/index.d.ts
@@ -0,0 +1,7 @@
export * from "./DeepSeekProvider.js";
export * from "./Chat.js";
export * from "./Models.js";
export * from "./Capabilities.js";
export declare function registerDeepSeekProvider(): void;
export declare const ensureDeepSeekRegistered: typeof registerDeepSeekProvider;
//# sourceMappingURL=index.d.ts.map

package/dist/providers/deepseek/index.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/deepseek/index.ts"],"names":[],"mappings":"AAIA,cAAc,uBAAuB,CAAC;AACtC,cAAc,WAAW,CAAC;AAC1B,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAIlC,wBAAgB,wBAAwB,SAevC;AAED,eAAO,MAAM,wBAAwB,iCAA2B,CAAC"}

package/dist/providers/deepseek/index.js
@@ -0,0 +1,22 @@
import { config } from "../../config.js";
import { providerRegistry } from "../registry.js";
import { DeepSeekProvider } from "./DeepSeekProvider.js";
export * from "./DeepSeekProvider.js";
export * from "./Chat.js";
export * from "./Models.js";
export * from "./Capabilities.js";
let registered = false;
export function registerDeepSeekProvider() {
    if (registered)
        return;
    providerRegistry.register("deepseek", () => {
        const apiKey = config.deepseekApiKey;
        const baseUrl = config.deepseekApiBase; // Optional override
        if (!apiKey) {
            throw new Error("deepseek_api_key is not set in config or DEEPSEEK_API_KEY environment variable");
        }
        return new DeepSeekProvider({ apiKey, baseUrl });
    });
    registered = true;
}
export const ensureDeepSeekRegistered = registerDeepSeekProvider;
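
Registration is lazy and idempotent: the factory only runs when the provider is resolved, and it reads the key from the shared config (whose error message points at `DEEPSEEK_API_KEY`). A sketch, assuming `ensureDeepSeekRegistered` is re-exported from the package root:

```ts
import { ensureDeepSeekRegistered } from "@node-llm/core"; // assumed root export

process.env.DEEPSEEK_API_KEY = "sk-..."; // or set deepseekApiKey on the package config
ensureDeepSeekRegistered(); // safe to call repeatedly; the factory only throws when resolved without a key
```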

package/dist/providers/gemini/Capabilities.d.ts.map
@@ -1 +1 @@
{"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Capabilities.ts"],"names":[],"mappings":"AAEA,qBAAa,YAAY;IACvB,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAuBvD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAoBzD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAWzD,MAAM,CAAC,0BAA0B,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAI3D,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIjD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQnD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAInD,MAAM,CAAC,oBAAoB,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAI/F,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,MAAM,EAAE,CAAA;KAAE;IAY5E,MAAM,CAAC,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;IAUjD,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,MAAM;;
{"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Capabilities.ts"],"names":[],"mappings":"AAEA,qBAAa,YAAY;IACvB,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAuBvD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAoBzD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAW9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAWzD,MAAM,CAAC,0BAA0B,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAI3D,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIjD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQnD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAQtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAInD,MAAM,CAAC,oBAAoB,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAI/F,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,MAAM,EAAE,CAAA;KAAE;IAY5E,MAAM,CAAC,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;IAUjD,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,MAAM;;oBArI2D,CAAC;iCAA6B,CAAC;wCAGrH,CAAC;kCACH,CAAC;4CAEgB,CAAC;;iBACd,CAAC;iCAEG,CAAC;kCAAoC,CAAC;;;;oBAG3B,CAAC;qBACV,CAAC;sBAEF,CAAC;;iBAGM,CAAC;qBAAiB,CAAC;sBACrC,CAAA;;;;oBAA6D,CAAC;iCACxC,CAAC;kCAEpB,CAAC;;;;IA4IN,OAAO,CAAC,MAAM,CAAC,gBAAgB;CAGhC"}

package/dist/providers/gemini/GeminiProvider.d.ts
@@ -21,6 +21,7 @@ export declare class GeminiProvider implements Provider {
         supportsImageGeneration: (model: string) => boolean;
         supportsTranscription: (model: string) => boolean;
         supportsModeration: (model: string) => boolean;
+        supportsReasoning: (_model: string) => boolean;
         getContextWindow: (model: string) => number | null;
     };
     constructor(options: GeminiProviderOptions);

package/dist/providers/gemini/GeminiProvider.d.ts.map
@@ -1 +1 @@
{"version":3,"file":"GeminiProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/GeminiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,WAAW,EACX,YAAY,EACZ,SAAS,EACT,SAAS,EACT,YAAY,EACZ,aAAa,EACb,oBAAoB,EACpB,qBAAqB,EACrB,iBAAiB,EACjB,kBAAkB,EACnB,MAAM,gBAAgB,CAAC;AAQxB,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEtE,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,cAAe,YAAW,QAAQ;
{"version":3,"file":"GeminiProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/GeminiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,WAAW,EACX,YAAY,EACZ,SAAS,EACT,SAAS,EACT,YAAY,EACZ,aAAa,EACb,oBAAoB,EACpB,qBAAqB,EACrB,iBAAiB,EACjB,kBAAkB,EACnB,MAAM,gBAAgB,CAAC;AAQxB,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEtE,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,cAAe,YAAW,QAAQ;IAqBjC,OAAO,CAAC,QAAQ,CAAC,OAAO;IApBpC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAkB;IACnD,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAe;IAC7C,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAc;IAC3C,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAmB;IACpD,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAsB;IAEpD,YAAY;gCACO,MAAM;+BACP,MAAM;0CACK,MAAM;oCACZ,MAAM;yCACD,MAAM;uCACR,MAAM;oCACT,MAAM;oCACN,MAAM;kCACR,MAAM;MAChC;gBAE2B,OAAO,EAAE,qBAAqB;IAUrD,IAAI,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;IAIhD,MAAM,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc,CAAC,SAAS,CAAC;IAIxD,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAIlC,KAAK,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;IAIpD,KAAK,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAI5D,UAAU,CAAC,OAAO,EAAE,oBAAoB,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAIzE,QAAQ,CAAC,QAAQ,EAAE,iBAAiB,GAAG,OAAO,CAAC,kBAAkB,CAAC;CAGzE"}

package/dist/providers/gemini/GeminiProvider.js
@@ -22,6 +22,7 @@ export class GeminiProvider {
         supportsImageGeneration: (model) => Capabilities.supportsImageGeneration(model),
         supportsTranscription: (model) => Capabilities.supportsTranscription(model),
         supportsModeration: (model) => Capabilities.supportsModeration(model),
+        supportsReasoning: (_model) => false,
         getContextWindow: (model) => Capabilities.getContextWindow(model),
     };
     constructor(options) {

package/dist/providers/gemini/index.d.ts.map
@@ -1 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/index.ts"],"names":[],"mappings":"
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/index.ts"],"names":[],"mappings":"AAMA;;;GAGG;AACH,wBAAgB,sBAAsB,SAcrC;AAED;;GAEG;AACH,eAAO,MAAM,sBAAsB,+BAAyB,CAAC;AAE7D,cAAc,qBAAqB,CAAC"}

package/dist/providers/gemini/index.js
@@ -1,3 +1,4 @@
+import { config } from "../../config.js";
 import { providerRegistry } from "../registry.js";
 import { GeminiProvider } from "./GeminiProvider.js";
 let registered = false;
@@ -9,9 +10,9 @@ export function registerGeminiProvider() {
     if (registered)
         return;
     providerRegistry.register("gemini", () => {
-        const apiKey =
+        const apiKey = config.geminiApiKey;
         if (!apiKey) {
-            throw new Error("
+            throw new Error("geminiApiKey is not set in config or GEMINI_API_KEY environment variable");
         }
         return new GeminiProvider({ apiKey });
     });
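
The Gemini factory now resolves its key through the same shared config object instead of reading the environment inline; per its error message, `GEMINI_API_KEY` remains the environment fallback. A minimal sketch with an assumed root export and an illustrative model id:

```ts
import { GeminiProvider } from "@node-llm/core"; // assumed root export

const gemini = new GeminiProvider({ apiKey: process.env.GEMINI_API_KEY ?? "" });
const reply = await gemini.chat({
  model: "gemini-1.5-flash", // illustrative model id
  messages: [{ role: "user", content: "Hello" }],
});
console.log(reply.content);
```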

package/dist/providers/ollama/Capabilities.d.ts
@@ -0,0 +1,13 @@
export declare class OllamaCapabilities {
    static findModel(modelId: string): import("../../models/types.js").Model | undefined;
    static getContextWindow(modelId: string): number | null;
    static supportsVision(modelId: string): boolean;
    static supportsTools(modelId: string): boolean;
    static supportsStructuredOutput(modelId: string): boolean;
    static supportsEmbeddings(modelId: string): boolean;
    static supportsReasoning(modelId: string): boolean;
    static supportsImageGeneration(modelId: string): boolean;
    static supportsTranscription(modelId: string): boolean;
    static supportsModeration(modelId: string): boolean;
}
//# sourceMappingURL=Capabilities.d.ts.map

package/dist/providers/ollama/Capabilities.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/Capabilities.ts"],"names":[],"mappings":"AAEA,qBAAa,kBAAkB;IAC7B,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,MAAM;IAYhC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAKvD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAK/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAK9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKzD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKnD,MAAM,CAAC,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKlD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAKtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;CAIpD"}

package/dist/providers/ollama/Capabilities.js
@@ -0,0 +1,50 @@
import { ModelRegistry } from "../../models/ModelRegistry.js";
export class OllamaCapabilities {
    static findModel(modelId) {
        // Ollama specific: try exact match first, then strip tags
        let model = ModelRegistry.find(modelId, "ollama");
        if (!model && modelId?.includes(":")) {
            const baseId = modelId.split(":")[0];
            if (baseId) {
                model = ModelRegistry.find(baseId, "ollama");
            }
        }
        return model;
    }
    static getContextWindow(modelId) {
        const model = this.findModel(modelId);
        return model?.context_window || 8192;
    }
    static supportsVision(modelId) {
        const model = this.findModel(modelId);
        return model?.modalities?.input?.includes("image") || model?.capabilities?.includes("vision") || false;
    }
    static supportsTools(modelId) {
        const model = this.findModel(modelId);
        return model?.capabilities?.includes("tools") || false;
    }
    static supportsStructuredOutput(modelId) {
        const model = this.findModel(modelId);
        return model?.capabilities?.includes("structured_output") || false;
    }
    static supportsEmbeddings(modelId) {
        const model = this.findModel(modelId);
        return model?.modalities?.output?.includes("embeddings") || model?.capabilities?.includes("embeddings") || false;
    }
    static supportsReasoning(modelId) {
        const model = this.findModel(modelId);
        return model?.capabilities?.includes("reasoning") || false;
    }
    static supportsImageGeneration(modelId) {
        const model = this.findModel(modelId);
        return model?.modalities?.output?.includes("image") || false;
    }
    static supportsTranscription(modelId) {
        const model = this.findModel(modelId);
        return model?.modalities?.input?.includes("audio") || false;
    }
    static supportsModeration(modelId) {
        const model = this.findModel(modelId);
        return model?.modalities?.output?.includes("moderation") || false;
    }
}
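
The lookup above tries the exact Ollama tag first and then falls back to the base name before the colon, so capability checks work for tagged pulls. A sketch of calling the static helpers shown above (the deep import path is an assumption, and whether a given id exists in the bundled registry depends on the registry contents):

```ts
import { OllamaCapabilities } from "@node-llm/core/dist/providers/ollama/Capabilities.js"; // assumed path

// "llama3.1:70b" resolves via a "llama3.1" registry entry when the tagged id is absent.
console.log(OllamaCapabilities.supportsTools("llama3.1:70b"));
// Unknown models fall back to an 8192-token context window.
console.log(OllamaCapabilities.getContextWindow("my-custom-model:latest"));
```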

package/dist/providers/ollama/Embedding.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"Embedding.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/Embedding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AAGzD,qBAAa,eAAgB,SAAQ,eAAe;cAC/B,eAAe,IAAI,MAAM;cAIzB,aAAa,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CAKtD"}

package/dist/providers/ollama/Embedding.js
@@ -0,0 +1,12 @@
import { OpenAIEmbedding } from "../openai/Embedding.js";
import { OllamaCapabilities } from "./Capabilities.js";
export class OllamaEmbedding extends OpenAIEmbedding {
    getProviderName() {
        return "ollama";
    }
    validateModel(model) {
        if (!OllamaCapabilities.supportsEmbeddings(model)) {
            throw new Error(`Model ${model} does not support embeddings.`);
        }
    }
}

package/dist/providers/ollama/Models.d.ts
@@ -0,0 +1,8 @@
import { OpenAIModels } from "../openai/Models.js";
export declare class OllamaModels extends OpenAIModels {
    protected getProviderName(): string;
    protected formatDisplayName(modelId: string): string;
    protected getContextWindow(modelId: string): number | null;
    protected getCapabilities(modelId: string): string[];
}
//# sourceMappingURL=Models.d.ts.map

package/dist/providers/ollama/Models.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"Models.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/Models.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAInD,qBAAa,YAAa,SAAQ,YAAY;cACzB,eAAe,IAAI,MAAM;cAIzB,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;cAO1C,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;cAIhD,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;CAW9D"}

package/dist/providers/ollama/Models.js
@@ -0,0 +1,31 @@
import { OpenAIModels } from "../openai/Models.js";
import { OllamaCapabilities } from "./Capabilities.js";
export class OllamaModels extends OpenAIModels {
    getProviderName() {
        return "ollama";
    }
    formatDisplayName(modelId) {
        const model = OllamaCapabilities.findModel(modelId);
        if (model?.name && model.name !== modelId)
            return model.name;
        const baseId = modelId.split(":")[0] || modelId;
        return baseId.replace(/-/g, " ").replace(/\b\w/g, c => c.toUpperCase());
    }
    getContextWindow(modelId) {
        return OllamaCapabilities.getContextWindow(modelId);
    }
    getCapabilities(modelId) {
        const model = OllamaCapabilities.findModel(modelId);
        if (model)
            return model.capabilities;
        // Fallback for custom pulled models
        const caps = ["streaming"];
        if (OllamaCapabilities.supportsTools(modelId))
            caps.push("tools");
        if (OllamaCapabilities.supportsVision(modelId))
            caps.push("vision");
        if (OllamaCapabilities.supportsEmbeddings(modelId))
            caps.push("embeddings");
        return caps;
    }
}

package/dist/providers/ollama/OllamaProvider.d.ts
@@ -0,0 +1,8 @@
import { OpenAIProvider } from "../openai/OpenAIProvider.js";
export interface OllamaProviderOptions {
    baseUrl?: string;
}
export declare class OllamaProvider extends OpenAIProvider {
    constructor(options?: OllamaProviderOptions);
}
//# sourceMappingURL=OllamaProvider.d.ts.map

package/dist/providers/ollama/OllamaProvider.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAM7D,MAAM,WAAW,qBAAqB;IACpC,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,cAAe,SAAQ,cAAc;gBACpC,OAAO,GAAE,qBAA0B;CAuBhD"}

package/dist/providers/ollama/OllamaProvider.js
@@ -0,0 +1,28 @@
import { OpenAIProvider } from "../openai/OpenAIProvider.js";
import { config } from "../../config.js";
import { OllamaModels } from "./Models.js";
import { OllamaEmbedding } from "./Embedding.js";
import { OllamaCapabilities } from "./Capabilities.js";
export class OllamaProvider extends OpenAIProvider {
    constructor(options = {}) {
        super({
            apiKey: "ollama",
            baseUrl: options.baseUrl || config.ollamaApiBase || "http://localhost:11434/v1"
        });
        // Override handlers with Ollama-specific ones
        this.modelsHandler = new OllamaModels(this.baseUrl, this.options.apiKey);
        this.embeddingHandler = new OllamaEmbedding(this.baseUrl, this.options.apiKey);
        // Override capabilities to use OllamaCapabilities
        this.capabilities = {
            supportsVision: (modelId) => OllamaCapabilities.supportsVision(modelId),
            supportsTools: (modelId) => OllamaCapabilities.supportsTools(modelId),
            supportsStructuredOutput: (modelId) => OllamaCapabilities.supportsStructuredOutput(modelId),
            supportsEmbeddings: (modelId) => OllamaCapabilities.supportsEmbeddings(modelId),
            supportsImageGeneration: (modelId) => OllamaCapabilities.supportsImageGeneration(modelId),
            supportsTranscription: (modelId) => OllamaCapabilities.supportsTranscription(modelId),
            supportsModeration: (modelId) => OllamaCapabilities.supportsModeration(modelId),
            supportsReasoning: (modelId) => OllamaCapabilities.supportsReasoning(modelId),
            getContextWindow: (modelId) => OllamaCapabilities.getContextWindow(modelId),
        };
    }
}
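
OllamaProvider rides on the OpenAI-compatible plumbing with a placeholder API key and swaps in the Ollama-specific models, embedding, and capability handlers. A sketch, assuming a root export; the LAN URL is just an example:

```ts
import { OllamaProvider } from "@node-llm/core"; // assumed root export

// Defaults to config.ollamaApiBase or http://localhost:11434/v1 when no baseUrl is passed.
const ollama = new OllamaProvider({ baseUrl: "http://192.168.1.20:11434/v1" });

const reply = await ollama.chat({
  model: "llama3.1", // any locally pulled tag
  messages: [{ role: "user", content: "Hello from the LAN" }],
});
console.log(reply.content);
```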

package/dist/providers/ollama/index.d.ts
@@ -0,0 +1,9 @@
import { OllamaProvider } from "./OllamaProvider.js";
export { OllamaProvider };
/**
 * Idempotent registration of the Ollama provider.
 * Automatically called by LLM.configure({ provider: 'ollama' })
 */
export declare function registerOllamaProvider(): void;
export declare const ensureOllamaRegistered: typeof registerOllamaProvider;
//# sourceMappingURL=index.d.ts.map

package/dist/providers/ollama/index.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/ollama/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAErD,OAAO,EAAE,cAAc,EAAE,CAAC;AAI1B;;;GAGG;AACH,wBAAgB,sBAAsB,SAQrC;AAED,eAAO,MAAM,sBAAsB,+BAAyB,CAAC"}

package/dist/providers/ollama/index.js
@@ -0,0 +1,17 @@
import { providerRegistry } from "../registry.js";
import { OllamaProvider } from "./OllamaProvider.js";
export { OllamaProvider };
let registered = false;
/**
 * Idempotent registration of the Ollama provider.
 * Automatically called by LLM.configure({ provider: 'ollama' })
 */
export function registerOllamaProvider() {
    if (registered)
        return;
    providerRegistry.register("ollama", () => {
        return new OllamaProvider();
    });
    registered = true;
}
export const ensureOllamaRegistered = registerOllamaProvider;
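
Per the doc comment above, registration normally happens implicitly through LLM.configure. A sketch of that flow; the exact configure options beyond `provider` are not shown in this diff, so treat the call as illustrative:

```ts
import { LLM } from "@node-llm/core"; // assumed root export

LLM.configure({ provider: "ollama" }); // triggers registerOllamaProvider() under the hood; idempotent
```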