@node-llm/core 0.7.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -1
- package/dist/config.d.ts +2 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/llm.d.ts +1 -0
- package/dist/llm.d.ts.map +1 -1
- package/dist/llm.js +4 -0
- package/dist/models/ModelRegistry.d.ts +0 -3
- package/dist/models/ModelRegistry.d.ts.map +1 -1
- package/dist/models/ModelRegistry.js +0 -3
- package/dist/models/models.d.ts.map +1 -1
- package/dist/models/models.js +336 -0
- package/dist/providers/ollama/Capabilities.d.ts +13 -0
- package/dist/providers/ollama/Capabilities.d.ts.map +1 -0
- package/dist/providers/ollama/Capabilities.js +50 -0
- package/dist/providers/ollama/Embedding.d.ts +6 -0
- package/dist/providers/ollama/Embedding.d.ts.map +1 -0
- package/dist/providers/ollama/Embedding.js +12 -0
- package/dist/providers/ollama/Models.d.ts +8 -0
- package/dist/providers/ollama/Models.d.ts.map +1 -0
- package/dist/providers/ollama/Models.js +31 -0
- package/dist/providers/ollama/OllamaProvider.d.ts +8 -0
- package/dist/providers/ollama/OllamaProvider.d.ts.map +1 -0
- package/dist/providers/ollama/OllamaProvider.js +28 -0
- package/dist/providers/ollama/index.d.ts +9 -0
- package/dist/providers/ollama/index.d.ts.map +1 -0
- package/dist/providers/ollama/index.js +17 -0
- package/dist/providers/openai/Capabilities.d.ts +1 -1
- package/dist/providers/openai/Capabilities.d.ts.map +1 -1
- package/dist/providers/openai/Capabilities.js +4 -2
- package/dist/providers/openai/Embedding.d.ts +4 -2
- package/dist/providers/openai/Embedding.d.ts.map +1 -1
- package/dist/providers/openai/Embedding.js +13 -8
- package/dist/providers/openai/Models.d.ts +12 -2
- package/dist/providers/openai/Models.d.ts.map +1 -1
- package/dist/providers/openai/Models.js +50 -16
- package/dist/providers/openai/OpenAIProvider.d.ts +16 -9
- package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
- package/dist/providers/openai/OpenAIProvider.js +1 -1
- package/package.json +1 -1
package/README.md
CHANGED

@@ -25,6 +25,9 @@
 
   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="28" />
   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-text.svg" height="20" />
+
+  <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="28" />
+  <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama-text.svg" height="18" />
 </p>
 
 <br/>

@@ -39,8 +42,12 @@ import { LLM } from "@node-llm/core";
 // 1. Configure once
 LLM.configure({ provider: "openai" });
 
-// 2. Chat
+// 2. Basic Chat
 const chat = LLM.chat("gpt-4o");
+const response = await chat.ask("Explain Node.js");
+console.log(response.content);
+
+// 3. Streaming
 for await (const chunk of chat.stream("Explain Node.js")) {
   process.stdout.write(chunk.content);
 }

@@ -123,6 +130,7 @@ console.log(res.content); // Output the final answer
 | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="18"> <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-text.svg" height="14"> | Chat, Streaming, Tools, Vision, Audio, Video, Embeddings |
 | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="12"> | Chat, Streaming, Tools, Vision, PDF Support, Structured Output |
 | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="18"> <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-text.svg" height="14"> | Chat (V3), **Reasoning (R1)**, Tools, Streaming, Structured Output |
+| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="18"> <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama-text.svg" height="12"> | **Local Inference**, Chat, Streaming, Tools, Vision, Embeddings |
 
 ---
 

package/dist/config.d.ts
CHANGED

@@ -11,6 +11,7 @@ export interface NodeLLMConfig {
     geminiApiBase?: string;
     deepseekApiKey?: string;
     deepseekApiBase?: string;
+    ollamaApiBase?: string;
     [key: string]: any;
 }
 declare class Configuration implements NodeLLMConfig {

@@ -22,6 +23,7 @@ declare class Configuration implements NodeLLMConfig {
     geminiApiBase?: string;
     deepseekApiKey?: string;
     deepseekApiBase?: string;
+    ollamaApiBase?: string;
     [key: string]: any;
 }
 export declare const config: Configuration;

package/dist/config.d.ts.map
CHANGED
package/dist/config.js
CHANGED

@@ -7,5 +7,6 @@ class Configuration {
     geminiApiBase = process.env.GEMINI_API_BASE?.trim();
     deepseekApiKey = process.env.DEEPSEEK_API_KEY?.trim();
     deepseekApiBase = process.env.DEEPSEEK_API_BASE?.trim();
+    ollamaApiBase = process.env.OLLAMA_API_BASE?.trim() || "http://localhost:11434/v1";
 }
 export const config = new Configuration();

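The new `ollamaApiBase` entry gives the provider a zero-config default of `http://localhost:11434/v1`, Ollama's OpenAI-compatible endpoint. A minimal sketch of overriding it for a remote daemon; the host URL is a made-up placeholder, and the variable has to be set before the package is loaded because `config.js` reads the environment at import time:

```ts
// Hypothetical remote Ollama host; any OpenAI-compatible /v1 endpoint works.
process.env.OLLAMA_API_BASE = "http://gpu-box.internal:11434/v1";

// Import after setting the variable so the Configuration instance picks it up;
// when OLLAMA_API_BASE is unset the library falls back to http://localhost:11434/v1.
const { LLM } = await import("@node-llm/core");
LLM.configure({ provider: "ollama" });
```
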
package/dist/index.d.ts
CHANGED

@@ -14,6 +14,7 @@ export { providerRegistry } from "./providers/registry.js";
 export { OpenAIProvider } from "./providers/openai/OpenAIProvider.js";
 export { registerOpenAIProvider } from "./providers/openai/index.js";
 export { registerAnthropicProvider } from "./providers/anthropic/index.js";
+export { registerOllamaProvider, OllamaProvider } from "./providers/ollama/index.js";
 export type { ImageRequest, ImageResponse } from "./providers/Provider.js";
 export * from "./errors/index.js";
 //# sourceMappingURL=index.d.ts.map

package/dist/index.d.ts.map
CHANGED
package/dist/index.js
CHANGED

@@ -8,4 +8,5 @@ export { providerRegistry } from "./providers/registry.js";
 export { OpenAIProvider } from "./providers/openai/OpenAIProvider.js";
 export { registerOpenAIProvider } from "./providers/openai/index.js";
 export { registerAnthropicProvider } from "./providers/anthropic/index.js";
+export { registerOllamaProvider, OllamaProvider } from "./providers/ollama/index.js";
 export * from "./errors/index.js";

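Both new symbols are re-exported from the package root, so manual registration no longer requires reaching into `dist/providers/ollama`. A sketch of the explicit path (the automatic path via `LLM.configure` is shown after the llm.js diff below):

```ts
import { LLM, registerOllamaProvider } from "@node-llm/core";

// Explicit, idempotent registration; LLM.configure({ provider: "ollama" })
// performs the same call lazily.
registerOllamaProvider();
LLM.configure({ provider: "ollama" });
```
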
package/dist/llm.d.ts
CHANGED
package/dist/llm.d.ts.map
CHANGED
package/dist/llm.js
CHANGED

@@ -4,6 +4,7 @@ import { ensureOpenAIRegistered } from "./providers/openai/index.js";
 import { registerGeminiProvider } from "./providers/gemini/index.js";
 import { registerAnthropicProvider } from "./providers/anthropic/index.js";
 import { registerDeepSeekProvider } from "./providers/deepseek/index.js";
+import { registerOllamaProvider } from "./providers/ollama/index.js";
 import { GeneratedImage } from "./image/GeneratedImage.js";
 import { ModelRegistry } from "./models/ModelRegistry.js";
 import { Transcription } from "./transcription/Transcription.js";

@@ -61,6 +62,9 @@ class LLMCore {
             if (provider === "deepseek") {
                 registerDeepSeekProvider();
             }
+            if (provider === "ollama") {
+                registerOllamaProvider();
+            }
             this.provider = providerRegistry.resolve(provider);
         }
         else if (provider) {

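With this branch, the string `"ollama"` behaves like the existing provider names. A hedged end-to-end sketch; it assumes the model has already been pulled into a locally running Ollama daemon:

```ts
import { LLM } from "@node-llm/core";

// The "ollama" string now triggers registerOllamaProvider() inside configure().
LLM.configure({ provider: "ollama" });

// llama3.1:8b is one of the ids added to the model registry in this release.
const chat = LLM.chat("llama3.1:8b");
const reply = await chat.ask("Explain Node.js in one sentence.");
console.log(reply.content);

for await (const chunk of chat.stream("Now explain the event loop.")) {
  process.stdout.write(chunk.content);
}
```
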
package/dist/models/ModelRegistry.d.ts.map
CHANGED

package/dist/models/ModelRegistry.js
CHANGED

@@ -1,9 +1,6 @@
 import { modelsData } from "./models.js";
 export class ModelRegistry {
     static models = modelsData;
-    /**
-     * Find a model by its ID.
-     */
     static find(modelId, provider) {
         return this.models.find(m => (m.id === modelId || m.family === modelId) && (!provider || m.provider === provider));
     }

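The lookup matches either the exact `id` or the `family` field, optionally scoped to a provider, which is what lets the Ollama capability checks further down resolve both tagged and untagged names. A sketch using the dist-internal import path (whether `ModelRegistry` is also re-exported from the package root is not shown in this diff):

```ts
import { ModelRegistry } from "./models/ModelRegistry.js";

const byId = ModelRegistry.find("llama3.1:70b", "ollama"); // exact id match
const byFamily = ModelRegistry.find("nomic", "ollama");    // matches nomic-embed-text via its family
console.log(byId?.context_window, byFamily?.id);
```
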
package/dist/models/models.d.ts.map
CHANGED

package/dist/models/models.js
CHANGED

@@ -7072,5 +7072,341 @@ export const modelsData = [
             }
         },
         "metadata": {}
+    },
+    { "id": "llama3", "name": "Llama 3", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 8192, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3:8b", "name": "Llama 3 8B", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 8192, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3:70b", "name": "Llama 3 70B", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 8192, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.1", "name": "Llama 3.1", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.1:8b", "name": "Llama 3.1 8B", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.1:70b", "name": "Llama 3.1 70B", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.1:405b", "name": "Llama 3.1 405B", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.2", "name": "Llama 3.2", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text", "image"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools", "vision"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.2:1b", "name": "Llama 3.2 1B", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.2:3b", "name": "Llama 3.2 3B", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.2:11b", "name": "Llama 3.2 11B (Vision)", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text", "image"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools", "vision"], "pricing": {}, "metadata": {} },
+    { "id": "llama3.2:90b", "name": "Llama 3.2 90B (Vision)", "provider": "ollama", "family": "llama3", "created_at": null, "context_window": 128000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text", "image"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools", "vision"], "pricing": {}, "metadata": {} },
+    { "id": "llama2", "name": "Llama 2", "provider": "ollama", "family": "llama2", "created_at": null, "context_window": 4096, "max_output_tokens": 4096, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "llama2:7b", "name": "Llama 2 7B", "provider": "ollama", "family": "llama2", "created_at": null, "context_window": 4096, "max_output_tokens": 4096, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "llama2:13b", "name": "Llama 2 13B", "provider": "ollama", "family": "llama2", "created_at": null, "context_window": 4096, "max_output_tokens": 4096, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "llama2:70b", "name": "Llama 2 70B", "provider": "ollama", "family": "llama2", "created_at": null, "context_window": 4096, "max_output_tokens": 4096, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "codellama", "name": "Code Llama", "provider": "ollama", "family": "llama2", "created_at": null, "context_window": 16384, "max_output_tokens": 4096, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "mistral", "name": "Mistral", "provider": "ollama", "family": "mistral", "created_at": null, "context_window": 32000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "mixtral", "name": "Mixtral", "provider": "ollama", "family": "mistral", "created_at": null, "context_window": 32000, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming", "tools"], "pricing": {}, "metadata": {} },
+    { "id": "phi3", "name": "Phi-3", "provider": "ollama", "family": "phi3", "created_at": null, "context_window": 128000, "max_output_tokens": 4096, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "gemma", "name": "Gemma", "provider": "ollama", "family": "gemma", "created_at": null, "context_window": 8192, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "gemma2", "name": "Gemma 2", "provider": "ollama", "family": "gemma", "created_at": null, "context_window": 8192, "max_output_tokens": 8192, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["text", "embeddings"] }, "capabilities": ["chat", "streaming"], "pricing": {}, "metadata": {} },
+    { "id": "nomic-embed-text", "name": "Nomic Embed Text", "provider": "ollama", "family": "nomic", "created_at": null, "context_window": 8192, "max_output_tokens": 0, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["embeddings"] }, "capabilities": ["embeddings"], "pricing": {}, "metadata": {} },
+    { "id": "mxbai-embed-large", "name": "mxbai-embed-large", "provider": "ollama", "family": "mxbai", "created_at": null, "context_window": 512, "max_output_tokens": 0, "knowledge_cutoff": null, "modalities": { "input": ["text"], "output": ["embeddings"] }, "capabilities": ["embeddings"], "pricing": {}, "metadata": {}
     }
 ];

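All of the new entries share `provider: "ollama"`, empty pricing, and null release/cutoff dates; only the context window, modalities, and capability flags vary. A small sketch that lists them via the registry (same dist-internal import caveat as above):

```ts
import { ModelRegistry } from "./models/ModelRegistry.js";

const ollamaModels = ModelRegistry.all().filter((m) => m.provider === "ollama");
for (const m of ollamaModels) {
  console.log(`${m.id}\tctx=${m.context_window}\tcaps=${m.capabilities.join(",")}`);
}
```
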
package/dist/providers/ollama/Capabilities.d.ts
ADDED

@@ -0,0 +1,13 @@
+export declare class OllamaCapabilities {
+    static findModel(modelId: string): import("../../models/types.js").Model | undefined;
+    static getContextWindow(modelId: string): number | null;
+    static supportsVision(modelId: string): boolean;
+    static supportsTools(modelId: string): boolean;
+    static supportsStructuredOutput(modelId: string): boolean;
+    static supportsEmbeddings(modelId: string): boolean;
+    static supportsReasoning(modelId: string): boolean;
+    static supportsImageGeneration(modelId: string): boolean;
+    static supportsTranscription(modelId: string): boolean;
+    static supportsModeration(modelId: string): boolean;
+}
+//# sourceMappingURL=Capabilities.d.ts.map

package/dist/providers/ollama/Capabilities.d.ts.map
ADDED

package/dist/providers/ollama/Capabilities.js
ADDED

@@ -0,0 +1,50 @@
+import { ModelRegistry } from "../../models/ModelRegistry.js";
+export class OllamaCapabilities {
+    static findModel(modelId) {
+        // Ollama specific: try exact match first, then strip tags
+        let model = ModelRegistry.find(modelId, "ollama");
+        if (!model && modelId?.includes(":")) {
+            const baseId = modelId.split(":")[0];
+            if (baseId) {
+                model = ModelRegistry.find(baseId, "ollama");
+            }
+        }
+        return model;
+    }
+    static getContextWindow(modelId) {
+        const model = this.findModel(modelId);
+        return model?.context_window || 8192;
+    }
+    static supportsVision(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.input?.includes("image") || model?.capabilities?.includes("vision") || false;
+    }
+    static supportsTools(modelId) {
+        const model = this.findModel(modelId);
+        return model?.capabilities?.includes("tools") || false;
+    }
+    static supportsStructuredOutput(modelId) {
+        const model = this.findModel(modelId);
+        return model?.capabilities?.includes("structured_output") || false;
+    }
+    static supportsEmbeddings(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.output?.includes("embeddings") || model?.capabilities?.includes("embeddings") || false;
+    }
+    static supportsReasoning(modelId) {
+        const model = this.findModel(modelId);
+        return model?.capabilities?.includes("reasoning") || false;
+    }
+    static supportsImageGeneration(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.output?.includes("image") || false;
+    }
+    static supportsTranscription(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.input?.includes("audio") || false;
+    }
+    static supportsModeration(modelId) {
+        const model = this.findModel(modelId);
+        return model?.modalities?.output?.includes("moderation") || false;
+    }
+}

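`findModel` is the piece that makes registry lookups tolerant of Ollama's `name:tag` convention: an exact id wins, otherwise the tag is stripped and the base name is retried, and unknown models fall back to permissive defaults (an 8192-token context, no vision or tools). A sketch of the three paths; the `-instruct-q4` tag and `my-custom-model` id are hypothetical local variants:

```ts
import { OllamaCapabilities } from "./providers/ollama/Capabilities.js";

OllamaCapabilities.supportsVision("llama3.2:11b");           // true – exact registry entry
OllamaCapabilities.supportsTools("llama3.1:8b-instruct-q4"); // true – falls back to the "llama3.1" entry
OllamaCapabilities.getContextWindow("my-custom-model");      // 8192 – default for unknown models
```
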
package/dist/providers/ollama/Embedding.d.ts.map
ADDED

package/dist/providers/ollama/Embedding.js
ADDED

@@ -0,0 +1,12 @@
+import { OpenAIEmbedding } from "../openai/Embedding.js";
+import { OllamaCapabilities } from "./Capabilities.js";
+export class OllamaEmbedding extends OpenAIEmbedding {
+    getProviderName() {
+        return "ollama";
+    }
+    validateModel(model) {
+        if (!OllamaCapabilities.supportsEmbeddings(model)) {
+            throw new Error(`Model ${model} does not support embeddings.`);
+        }
+    }
+}

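`OllamaEmbedding` reuses the OpenAI-compatible HTTP plumbing and only swaps the validation, so whether a request is accepted is decided entirely by the capability check sketched here:

```ts
import { OllamaCapabilities } from "./providers/ollama/Capabilities.js";

// Accepted – the registry entry's output modalities include "embeddings".
OllamaCapabilities.supportsEmbeddings("nomic-embed-text");   // true

// Rejected – an id with no registry entry reports false, so
// OllamaEmbedding.validateModel would throw before any request is sent.
OllamaCapabilities.supportsEmbeddings("some-unknown-model"); // false
```
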
package/dist/providers/ollama/Models.d.ts
ADDED

@@ -0,0 +1,8 @@
+import { OpenAIModels } from "../openai/Models.js";
+export declare class OllamaModels extends OpenAIModels {
+    protected getProviderName(): string;
+    protected formatDisplayName(modelId: string): string;
+    protected getContextWindow(modelId: string): number | null;
+    protected getCapabilities(modelId: string): string[];
+}
+//# sourceMappingURL=Models.d.ts.map

package/dist/providers/ollama/Models.d.ts.map
ADDED

package/dist/providers/ollama/Models.js
ADDED

@@ -0,0 +1,31 @@
+import { OpenAIModels } from "../openai/Models.js";
+import { OllamaCapabilities } from "./Capabilities.js";
+export class OllamaModels extends OpenAIModels {
+    getProviderName() {
+        return "ollama";
+    }
+    formatDisplayName(modelId) {
+        const model = OllamaCapabilities.findModel(modelId);
+        if (model?.name && model.name !== modelId)
+            return model.name;
+        const baseId = modelId.split(":")[0] || modelId;
+        return baseId.replace(/-/g, " ").replace(/\b\w/g, c => c.toUpperCase());
+    }
+    getContextWindow(modelId) {
+        return OllamaCapabilities.getContextWindow(modelId);
+    }
+    getCapabilities(modelId) {
+        const model = OllamaCapabilities.findModel(modelId);
+        if (model)
+            return model.capabilities;
+        // Fallback for custom pulled models
+        const caps = ["streaming"];
+        if (OllamaCapabilities.supportsTools(modelId))
+            caps.push("tools");
+        if (OllamaCapabilities.supportsVision(modelId))
+            caps.push("vision");
+        if (OllamaCapabilities.supportsEmbeddings(modelId))
+            caps.push("embeddings");
+        return caps;
+    }
+}

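`OllamaModels` keeps the parent's `/models` call and only customises naming and capability resolution, so locally pulled models that are missing from the registry still get a readable display name and a conservative capability list. A usage sketch; the base URL and placeholder key mirror what `OllamaProvider` passes in:

```ts
import { OllamaModels } from "./providers/ollama/Models.js";

// Ollama ignores the API key, so "ollama" is just a placeholder string.
const models = new OllamaModels("http://localhost:11434/v1", "ollama");

// Lists whatever the daemon has pulled; registry entries contribute names such as
// "Llama 3.1 8B", while unknown ids are title-cased from their base name instead.
const infos = await models.execute();
console.log(infos.map((m) => `${m.id} -> ${m.name}`));
```
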
package/dist/providers/ollama/OllamaProvider.d.ts
ADDED

@@ -0,0 +1,8 @@
+import { OpenAIProvider } from "../openai/OpenAIProvider.js";
+export interface OllamaProviderOptions {
+    baseUrl?: string;
+}
+export declare class OllamaProvider extends OpenAIProvider {
+    constructor(options?: OllamaProviderOptions);
+}
+//# sourceMappingURL=OllamaProvider.d.ts.map

package/dist/providers/ollama/OllamaProvider.d.ts.map
ADDED

package/dist/providers/ollama/OllamaProvider.js
ADDED

@@ -0,0 +1,28 @@
+import { OpenAIProvider } from "../openai/OpenAIProvider.js";
+import { config } from "../../config.js";
+import { OllamaModels } from "./Models.js";
+import { OllamaEmbedding } from "./Embedding.js";
+import { OllamaCapabilities } from "./Capabilities.js";
+export class OllamaProvider extends OpenAIProvider {
+    constructor(options = {}) {
+        super({
+            apiKey: "ollama",
+            baseUrl: options.baseUrl || config.ollamaApiBase || "http://localhost:11434/v1"
+        });
+        // Override handlers with Ollama-specific ones
+        this.modelsHandler = new OllamaModels(this.baseUrl, this.options.apiKey);
+        this.embeddingHandler = new OllamaEmbedding(this.baseUrl, this.options.apiKey);
+        // Override capabilities to use OllamaCapabilities
+        this.capabilities = {
+            supportsVision: (modelId) => OllamaCapabilities.supportsVision(modelId),
+            supportsTools: (modelId) => OllamaCapabilities.supportsTools(modelId),
+            supportsStructuredOutput: (modelId) => OllamaCapabilities.supportsStructuredOutput(modelId),
+            supportsEmbeddings: (modelId) => OllamaCapabilities.supportsEmbeddings(modelId),
+            supportsImageGeneration: (modelId) => OllamaCapabilities.supportsImageGeneration(modelId),
+            supportsTranscription: (modelId) => OllamaCapabilities.supportsTranscription(modelId),
+            supportsModeration: (modelId) => OllamaCapabilities.supportsModeration(modelId),
+            supportsReasoning: (modelId) => OllamaCapabilities.supportsReasoning(modelId),
+            getContextWindow: (modelId) => OllamaCapabilities.getContextWindow(modelId),
+        };
+    }
+}

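The provider is a thin subclass: it fixes the API key to a placeholder, points the base URL at the configured Ollama endpoint, and swaps in the Ollama-aware models/embedding handlers and capability table. A construction sketch (the remote address is a made-up example):

```ts
import { OllamaProvider } from "@node-llm/core";

const local = new OllamaProvider();                                         // http://localhost:11434/v1
const remote = new OllamaProvider({ baseUrl: "http://10.0.0.5:11434/v1" }); // hypothetical host

console.log(remote.capabilities.supportsVision("llama3.2:11b")); // true, via OllamaCapabilities
console.log(remote.capabilities.supportsTools("llama2"));        // false – registry entry has no "tools"
```
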
package/dist/providers/ollama/index.d.ts
ADDED

@@ -0,0 +1,9 @@
+import { OllamaProvider } from "./OllamaProvider.js";
+export { OllamaProvider };
+/**
+ * Idempotent registration of the Ollama provider.
+ * Automatically called by LLM.configure({ provider: 'ollama' })
+ */
+export declare function registerOllamaProvider(): void;
+export declare const ensureOllamaRegistered: typeof registerOllamaProvider;
+//# sourceMappingURL=index.d.ts.map

package/dist/providers/ollama/index.d.ts.map
ADDED

package/dist/providers/ollama/index.js
ADDED

@@ -0,0 +1,17 @@
+import { providerRegistry } from "../registry.js";
+import { OllamaProvider } from "./OllamaProvider.js";
+export { OllamaProvider };
+let registered = false;
+/**
+ * Idempotent registration of the Ollama provider.
+ * Automatically called by LLM.configure({ provider: 'ollama' })
+ */
+export function registerOllamaProvider() {
+    if (registered)
+        return;
+    providerRegistry.register("ollama", () => {
+        return new OllamaProvider();
+    });
+    registered = true;
+}
+export const ensureOllamaRegistered = registerOllamaProvider;

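Registration is guarded by a module-level flag, so calling it from several entry points is harmless and only one factory ever ends up registered under the `"ollama"` key. A sketch against the public re-exports:

```ts
import { providerRegistry, registerOllamaProvider } from "@node-llm/core";

registerOllamaProvider();
registerOllamaProvider(); // no-op – the flag short-circuits the second call

// resolve("ollama") hands back a provider built by the registered factory.
const provider = providerRegistry.resolve("ollama");
```
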
package/dist/providers/openai/Capabilities.d.ts
CHANGED

@@ -10,7 +10,7 @@ export declare class Capabilities {
     static supportsTranscription(modelId: string): boolean;
     static supportsModeration(modelId: string): boolean;
     static supportsReasoning(modelId: string): boolean;
-    static getModelType(modelId: string): "
+    static getModelType(modelId: string): "embeddings" | "audio" | "moderation" | "image" | "chat" | "audio_transcription" | "audio_speech";
     static getModalities(modelId: string): {
         input: string[];
         output: string[];

package/dist/providers/openai/Capabilities.d.ts.map
CHANGED

package/dist/providers/openai/Capabilities.js
CHANGED

@@ -30,7 +30,7 @@ export class Capabilities {
     }
     static supportsTools(modelId) {
         const model = ModelRegistry.find(modelId, "openai");
-        if (model?.capabilities?.includes("function_calling"))
+        if (model?.capabilities?.includes("function_calling") || model?.capabilities?.includes("tools"))
             return true;
         return !/embedding|moderation|dall-e|tts|whisper/.test(modelId);
     }

@@ -74,10 +74,12 @@ export class Capabilities {
         return /o\d|gpt-5/.test(modelId);
     }
     static getModelType(modelId) {
+        if (this.supportsEmbeddings(modelId))
+            return "embeddings";
         if (/moderation/.test(modelId))
             return "moderation";
         if (/embedding/.test(modelId))
-            return "
+            return "embeddings";
         if (/dall-e|image/.test(modelId))
             return "image";
         if (/whisper|transcribe/.test(modelId))

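Two behavioural fixes ride along with the Ollama work: registry entries that advertise `"tools"` (the convention used by the new Ollama data) now count as tool-capable, and `getModelType` reports `"embeddings"` consistently, checking registry capabilities before falling back to the id regex. A quick sketch against the dist-internal module (`Capabilities` is not part of the root export):

```ts
import { Capabilities } from "./providers/openai/Capabilities.js";

Capabilities.getModelType("text-embedding-3-small"); // "embeddings"
Capabilities.getModelType("gpt-4o");                 // "chat" – falls through to the default branch
Capabilities.supportsTools("gpt-4o");                // true
```
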
package/dist/providers/openai/Embedding.d.ts
CHANGED

@@ -1,8 +1,10 @@
 import { EmbeddingRequest, EmbeddingResponse } from "../Embedding.js";
 export declare class OpenAIEmbedding {
-
-
+    protected readonly baseUrl: string;
+    protected readonly apiKey: string;
     constructor(baseUrl: string, apiKey: string);
+    protected getProviderName(): string;
+    protected validateModel(model: string): void;
     execute(request: EmbeddingRequest): Promise<EmbeddingResponse>;
 }
 //# sourceMappingURL=Embedding.d.ts.map

package/dist/providers/openai/Embedding.d.ts.map
CHANGED

package/dist/providers/openai/Embedding.js
CHANGED

@@ -9,12 +9,17 @@ export class OpenAIEmbedding {
         this.baseUrl = baseUrl;
         this.apiKey = apiKey;
     }
-
-
-
-
+    getProviderName() {
+        return "openai";
+    }
+    validateModel(model) {
+        if (Capabilities.getModelType(model) !== "embeddings") {
             throw new Error(`Model ${model} does not support embeddings.`);
         }
+    }
+    async execute(request) {
+        const model = request.model || DEFAULT_MODELS.EMBEDDING;
+        this.validateModel(model);
         const body = {
             input: request.input,
             model,

@@ -36,13 +41,13 @@ export class OpenAIEmbedding {
         if (!response.ok) {
             await handleOpenAIError(response, request.model || DEFAULT_MODELS.EMBEDDING);
         }
-        const
+        const { data, model: responseModel, usage } = await response.json();
         // Extract vectors from the response
-        const vectors =
+        const vectors = data.map((item) => item.embedding);
         return {
             vectors,
-            model:
-            input_tokens:
+            model: responseModel,
+            input_tokens: usage.prompt_tokens,
             dimensions: vectors[0]?.length || 0,
         };
     }

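Exposing `getProviderName` and `validateModel` as protected hooks is what lets `OllamaEmbedding` above reuse the request/response handling unchanged. The same extension point works for any other OpenAI-compatible endpoint; everything named `acme` below is a made-up placeholder:

```ts
import { OpenAIEmbedding } from "./providers/openai/Embedding.js";

class AcmeEmbedding extends OpenAIEmbedding {
  protected getProviderName(): string {
    return "acme";
  }
  protected validateModel(model: string): void {
    if (!model.startsWith("acme-embed")) {
      throw new Error(`Model ${model} does not support embeddings.`);
    }
  }
}

const handler = new AcmeEmbedding("https://api.acme.example/v1", process.env.ACME_API_KEY ?? "");
const res = await handler.execute({ input: ["hello world"], model: "acme-embed-small" });
console.log(res.dimensions, res.vectors.length);
```
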
package/dist/providers/openai/Models.d.ts
CHANGED

@@ -1,8 +1,18 @@
 import { ModelInfo } from "../Provider.js";
 export declare class OpenAIModels {
-
-
+    protected readonly baseUrl: string;
+    protected readonly apiKey: string;
     constructor(baseUrl: string, apiKey: string);
+    protected getProviderName(): string;
+    protected formatDisplayName(modelId: string): string;
+    protected getContextWindow(modelId: string): number | null;
+    protected getMaxOutputTokens(modelId: string): number | null;
+    protected getModalities(modelId: string): {
+        input: string[];
+        output: string[];
+    };
+    protected getCapabilities(modelId: string): string[];
+    protected getPricing(modelId: string): any;
     execute(): Promise<ModelInfo[]>;
     find(modelId: string): import("../../models/types.js").Model | undefined;
 }

package/dist/providers/openai/Models.d.ts.map
CHANGED

package/dist/providers/openai/Models.js
CHANGED

@@ -8,7 +8,35 @@ export class OpenAIModels {
         this.baseUrl = baseUrl;
         this.apiKey = apiKey;
     }
+    getProviderName() {
+        return "openai";
+    }
+    formatDisplayName(modelId) {
+        const model = ModelRegistry.find(modelId, this.getProviderName());
+        if (model?.name && model.name !== modelId)
+            return model.name;
+        return Capabilities.formatDisplayName(modelId);
+    }
+    getContextWindow(modelId) {
+        return ModelRegistry.getContextWindow(modelId, this.getProviderName()) || Capabilities.getContextWindow(modelId) || null;
+    }
+    getMaxOutputTokens(modelId) {
+        return ModelRegistry.getMaxOutputTokens(modelId, this.getProviderName()) || Capabilities.getMaxOutputTokens(modelId) || null;
+    }
+    getModalities(modelId) {
+        const model = ModelRegistry.find(modelId, this.getProviderName());
+        return model?.modalities || Capabilities.getModalities(modelId);
+    }
+    getCapabilities(modelId) {
+        const model = ModelRegistry.find(modelId, this.getProviderName());
+        return model?.capabilities || Capabilities.getCapabilities(modelId);
+    }
+    getPricing(modelId) {
+        const model = ModelRegistry.find(modelId, this.getProviderName());
+        return model?.pricing || Capabilities.getPricing(modelId);
+    }
     async execute() {
+        const provider = this.getProviderName();
         try {
             const response = await fetch(buildUrl(this.baseUrl, '/models'), {
                 method: "GET",

@@ -21,39 +49,45 @@ export class OpenAIModels {
                 const { data } = await response.json();
                 return data.map(m => {
                     const modelId = m.id;
-                    const registryModel = ModelRegistry.find(modelId,
-
+                    const registryModel = ModelRegistry.find(modelId, provider);
+                    return {
                         id: modelId,
-                        name:
-                        provider:
+                        name: this.formatDisplayName(modelId),
+                        provider: provider,
                         family: registryModel?.family || modelId,
-                        context_window:
-                        max_output_tokens:
-                        modalities:
-                        capabilities:
-                        pricing:
+                        context_window: this.getContextWindow(modelId),
+                        max_output_tokens: this.getMaxOutputTokens(modelId),
+                        modalities: this.getModalities(modelId),
+                        capabilities: this.getCapabilities(modelId),
+                        pricing: this.getPricing(modelId),
                         metadata: {
                             ...(registryModel?.metadata || {}),
                             created: m.created,
                             owned_by: m.owned_by
                         }
                     };
-                    return info;
                 });
             }
         }
         catch (_error) {
-            // Fallback to registry if API call fails
+            // Fallback to registry if API call fails
        }
        // Fallback to registry data
        return ModelRegistry.all()
-            .filter(m => m.provider ===
-            .map(m => ({
-
-
+            .filter((m) => m.provider === provider)
+            .map((m) => ({
+            id: m.id,
+            name: m.name,
+            family: m.family || m.id,
+            provider: provider,
+            context_window: m.context_window ?? null,
+            capabilities: m.capabilities,
+            modalities: m.modalities,
+            max_output_tokens: m.max_output_tokens ?? null,
+            pricing: m.pricing || {}
        }));
    }
    find(modelId) {
-        return ModelRegistry.find(modelId,
+        return ModelRegistry.find(modelId, this.getProviderName());
    }
 }

package/dist/providers/openai/OpenAIProvider.d.ts
CHANGED

@@ -1,4 +1,11 @@
 import { Provider, ChatRequest, ChatResponse, ModelInfo, ChatChunk, ImageRequest, ImageResponse, ModerationRequest, ModerationResponse } from "../Provider.js";
+import { OpenAIChat } from "./Chat.js";
+import { OpenAIStreaming } from "./Streaming.js";
+import { OpenAIModels } from "./Models.js";
+import { OpenAIImage } from "./Image.js";
+import { OpenAITranscription } from "./Transcription.js";
+import { OpenAIModeration } from "./Moderation.js";
+import { OpenAIEmbedding } from "./Embedding.js";
 import { TranscriptionRequest, TranscriptionResponse } from "../Provider.js";
 import { EmbeddingRequest, EmbeddingResponse } from "../Embedding.js";
 export interface OpenAIProviderOptions {

@@ -6,15 +13,15 @@ export interface OpenAIProviderOptions {
     baseUrl?: string;
 }
 export declare class OpenAIProvider implements Provider {
-
-
-
-
-
-
-
-
-
+    protected readonly options: OpenAIProviderOptions;
+    protected baseUrl: string;
+    protected chatHandler: OpenAIChat;
+    protected streamingHandler: OpenAIStreaming;
+    protected modelsHandler: OpenAIModels;
+    protected imageHandler: OpenAIImage;
+    protected transcriptionHandler: OpenAITranscription;
+    protected moderationHandler: OpenAIModeration;
+    protected embeddingHandler: OpenAIEmbedding;
     capabilities: {
         supportsVision: (model: string) => boolean;
         supportsTools: (model: string) => boolean;

package/dist/providers/openai/OpenAIProvider.d.ts.map
CHANGED

package/dist/providers/openai/OpenAIProvider.js
CHANGED

@@ -25,7 +25,7 @@ export class OpenAIProvider {
         supportsTranscription: (model) => Capabilities.supportsTranscription(model),
         supportsModeration: (model) => Capabilities.supportsModeration(model),
         supportsReasoning: (model) => Capabilities.supportsReasoning(model),
-        getContextWindow: (model) => Capabilities.getContextWindow(model),
+        getContextWindow: (model) => Capabilities.getContextWindow(model) || null,
     };
     constructor(options) {
         this.options = options;