@push.rocks/smartai 0.0.14 → 0.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist_ts/00_commitinfo_data.d.ts +1 -1
- package/dist_ts/00_commitinfo_data.js +2 -2
- package/dist_ts/abstract.classes.multimodal.d.ts +50 -17
- package/dist_ts/abstract.classes.multimodal.js +5 -1
- package/dist_ts/plugins.d.ts +0 -1
- package/dist_ts/provider.anthropic.d.ts +5 -12
- package/dist_ts/provider.anthropic.js +79 -9
- package/dist_ts/provider.groq.d.ts +18 -0
- package/dist_ts/provider.groq.js +157 -0
- package/dist_ts/provider.ollama.d.ts +18 -1
- package/dist_ts/provider.ollama.js +145 -2
- package/dist_ts/provider.openai.d.ts +1 -2
- package/dist_ts/provider.openai.js +62 -6
- package/dist_ts/provider.perplexity.d.ts +15 -1
- package/dist_ts/provider.perplexity.js +136 -2
- package/package.json +13 -13
- package/readme.md +122 -39
- package/ts/00_commitinfo_data.ts +2 -2
- package/ts/abstract.classes.multimodal.ts +51 -18
- package/ts/provider.anthropic.ts +88 -17
- package/ts/provider.groq.ts +179 -0
- package/ts/provider.ollama.ts +168 -1
- package/ts/provider.openai.ts +67 -6
- package/ts/provider.perplexity.ts +156 -1

package/dist_ts/00_commitinfo_data.js CHANGED
@@ -1,9 +1,9 @@
 /**
- * autocreated commitinfo by @
+ * autocreated commitinfo by @push.rocks/commitinfo
  */
 export const commitinfo = {
   name: '@push.rocks/smartai',
-  version: '0.0.14',
+  version: '0.0.19',
   description: 'A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.'
 };
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiMDBfY29tbWl0aW5mb19kYXRhLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvMDBfY29tbWl0aW5mb19kYXRhLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOztHQUVHO0FBQ0gsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHO0lBQ3hCLElBQUksRUFBRSxxQkFBcUI7SUFDM0IsT0FBTyxFQUFFLFFBQVE7SUFDakIsV0FBVyxFQUFFLCtJQUErSTtDQUM3SixDQUFBIn0=

package/dist_ts/abstract.classes.multimodal.d.ts CHANGED
@@ -1,27 +1,60 @@
+/**
+ * Message format for chat interactions
+ */
+export interface ChatMessage {
+    role: 'assistant' | 'user' | 'system';
+    content: string;
+}
+/**
+ * Options for chat interactions
+ */
+export interface ChatOptions {
+    systemMessage: string;
+    userMessage: string;
+    messageHistory: ChatMessage[];
+}
+/**
+ * Response format for chat interactions
+ */
+export interface ChatResponse {
+    role: 'assistant';
+    message: string;
+}
+/**
+ * Abstract base class for multi-modal AI models.
+ * Provides a common interface for different AI providers (OpenAI, Anthropic, Perplexity, Ollama)
+ */
 export declare abstract class MultiModalModel {
     /**
-     *
+     * Initializes the model and any necessary resources
+     * Should be called before using any other methods
      */
    abstract start(): Promise<void>;
    /**
-     *
+     * Cleans up any resources used by the model
+     * Should be called when the model is no longer needed
     */
    abstract stop(): Promise<void>;
-    abstract chat(optionsArg: {
-        systemMessage: string;
-        userMessage: string;
-        messageHistory: {
-            role: 'assistant' | 'user';
-            content: string;
-        }[];
-    }): Promise<{
-        role: 'assistant';
-        message: string;
-    }>;
    /**
-     *
-     *
-     * @
+     * Synchronous chat interaction with the model
+     * @param optionsArg Options containing system message, user message, and message history
+     * @returns Promise resolving to the assistant's response
     */
-    abstract
+    abstract chat(optionsArg: ChatOptions): Promise<ChatResponse>;
+    /**
+     * Streaming interface for chat interactions
+     * Allows for real-time responses from the model
+     * @param input Stream of user messages
+     * @returns Stream of model responses
+     */
+    abstract chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>>;
+    /**
+     * Text-to-speech conversion
+     * @param optionsArg Options containing the message to convert to speech
+     * @returns Promise resolving to a readable stream of audio data
+     * @throws Error if the provider doesn't support audio generation
+     */
+    abstract audio(optionsArg: {
+        message: string;
+    }): Promise<NodeJS.ReadableStream>;
 }
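
The inline object types of the old `chat()` signature are now the named `ChatMessage`, `ChatOptions`, and `ChatResponse` exports shared by every provider. Below is a minimal consumer-side sketch of the new signature; the option name `anthropicToken` comes from this diff, while the root export path is an assumption.

```typescript
import { AnthropicProvider } from '@push.rocks/smartai';

// Hypothetical usage sketch; option names are taken from this diff,
// the package-root export is assumed.
const provider = new AnthropicProvider({ anthropicToken: process.env.ANTHROPIC_TOKEN! });
await provider.start();

const response = await provider.chat({
  systemMessage: 'You are a helpful assistant.',
  userMessage: 'Summarize what changed in 0.0.19.',
  messageHistory: [], // ChatMessage[]: role is 'assistant' | 'user' | 'system'
});

console.log(response.role);    // always 'assistant'
console.log(response.message); // the reply text

await provider.stop();
```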

package/dist_ts/abstract.classes.multimodal.js CHANGED
@@ -1,3 +1,7 @@
+/**
+ * Abstract base class for multi-modal AI models.
+ * Provides a common interface for different AI providers (OpenAI, Anthropic, Perplexity, Ollama)
+ */
 export class MultiModalModel {
 }
-//# sourceMappingURL=data:application/json;base64,
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYWJzdHJhY3QuY2xhc3Nlcy5tdWx0aW1vZGFsLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvYWJzdHJhY3QuY2xhc3Nlcy5tdWx0aW1vZGFsLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQXlCQTs7O0dBR0c7QUFDSCxNQUFNLE9BQWdCLGVBQWU7Q0FtQ3BDIn0=

package/dist_ts/plugins.d.ts CHANGED

package/dist_ts/provider.anthropic.d.ts CHANGED
@@ -1,5 +1,6 @@
 import * as plugins from './plugins.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
+import type { ChatOptions, ChatResponse } from './abstract.classes.multimodal.js';
 export interface IAnthropicProviderOptions {
     anthropicToken: string;
 }
@@ -9,17 +10,9 @@ export declare class AnthropicProvider extends MultiModalModel {
     constructor(optionsArg: IAnthropicProviderOptions);
     start(): Promise<void>;
     stop(): Promise<void>;
-    chatStream(input: ReadableStream<
-    chat(optionsArg:
-        systemMessage: string;
-        userMessage: string;
-        messageHistory: {
-            role: 'assistant' | 'user';
-            content: string;
-        }[];
-    }): Promise<{
-        role: "assistant";
+    chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>>;
+    chat(optionsArg: ChatOptions): Promise<ChatResponse>;
+    audio(optionsArg: {
         message: string;
-    }>;
-    private audio;
+    }): Promise<NodeJS.ReadableStream>;
 }

package/dist_ts/provider.anthropic.js CHANGED
@@ -13,29 +13,99 @@ export class AnthropicProvider extends MultiModalModel {
     }
     async stop() { }
     async chatStream(input) {
-        //
-        const
-
+        // Create a TextDecoder to handle incoming chunks
+        const decoder = new TextDecoder();
+        let buffer = '';
+        let currentMessage = null;
+        // Create a TransformStream to process the input
+        const transform = new TransformStream({
+            async transform(chunk, controller) {
+                buffer += decoder.decode(chunk, { stream: true });
+                // Try to parse complete JSON messages from the buffer
+                while (true) {
+                    const newlineIndex = buffer.indexOf('\n');
+                    if (newlineIndex === -1)
+                        break;
+                    const line = buffer.slice(0, newlineIndex);
+                    buffer = buffer.slice(newlineIndex + 1);
+                    if (line.trim()) {
+                        try {
+                            const message = JSON.parse(line);
+                            currentMessage = {
+                                role: message.role || 'user',
+                                content: message.content || '',
+                            };
+                        }
+                        catch (e) {
+                            console.error('Failed to parse message:', e);
+                        }
+                    }
+                }
+                // If we have a complete message, send it to Anthropic
+                if (currentMessage) {
+                    const stream = await this.anthropicApiClient.messages.create({
+                        model: 'claude-3-opus-20240229',
+                        messages: [{ role: currentMessage.role, content: currentMessage.content }],
+                        system: '',
+                        stream: true,
+                        max_tokens: 4000,
+                    });
+                    // Process each chunk from Anthropic
+                    for await (const chunk of stream) {
+                        const content = chunk.delta?.text;
+                        if (content) {
+                            controller.enqueue(content);
+                        }
+                    }
+                    currentMessage = null;
+                }
+            },
+            flush(controller) {
+                if (buffer) {
+                    try {
+                        const message = JSON.parse(buffer);
+                        controller.enqueue(message.content || '');
+                    }
+                    catch (e) {
+                        console.error('Failed to parse remaining buffer:', e);
+                    }
+                }
+            }
+        });
+        // Connect the input to our transform stream
+        return input.pipeThrough(transform);
     }
     // Implementing the synchronous chat interaction
     async chat(optionsArg) {
+        // Convert message history to Anthropic format
+        const messages = optionsArg.messageHistory.map(msg => ({
+            role: msg.role === 'assistant' ? 'assistant' : 'user',
+            content: msg.content
+        }));
         const result = await this.anthropicApiClient.messages.create({
             model: 'claude-3-opus-20240229',
             system: optionsArg.systemMessage,
             messages: [
-                ...
-                { role: 'user', content: optionsArg.userMessage }
+                ...messages,
+                { role: 'user', content: optionsArg.userMessage }
             ],
             max_tokens: 4000,
         });
+        // Extract text content from the response
+        let message = '';
+        for (const block of result.content) {
+            if ('text' in block) {
+                message += block.text;
+            }
+        }
         return {
-            role:
-            message
+            role: 'assistant',
+            message,
         };
     }
-    async audio(
+    async audio(optionsArg) {
         // Anthropic does not provide an audio API, so this method is not implemented.
         throw new Error('Audio generation is not yet supported by Anthropic.');
     }
 }
-//# sourceMappingURL=data:application/json;base64,
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJvdmlkZXIuYW50aHJvcGljLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvcHJvdmlkZXIuYW50aHJvcGljLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxPQUFPLE1BQU0sY0FBYyxDQUFDO0FBQ3hDLE9BQU8sS0FBSyxLQUFLLE1BQU0sWUFBWSxDQUFDO0FBQ3BDLE9BQU8sRUFBRSxlQUFlLEVBQUUsTUFBTSxrQ0FBa0MsQ0FBQztBQU9uRSxNQUFNLE9BQU8saUJBQWtCLFNBQVEsZUFBZTtJQUlwRCxZQUFZLFVBQXFDO1FBQy9DLEtBQUssRUFBRSxDQUFDO1FBQ1IsSUFBSSxDQUFDLE9BQU8sR0FBRyxVQUFVLENBQUEsQ0FBQyw2QkFBNkI7SUFDekQsQ0FBQztJQUVELEtBQUssQ0FBQyxLQUFLO1FBQ1QsSUFBSSxDQUFDLGtCQUFrQixHQUFHLElBQUksT0FBTyxDQUFDLFNBQVMsQ0FBQyxPQUFPLENBQUM7WUFDdEQsTUFBTSxFQUFFLElBQUksQ0FBQyxPQUFPLENBQUMsY0FBYztTQUNwQyxDQUFDLENBQUM7SUFDTCxDQUFDO0lBRUQsS0FBSyxDQUFDLElBQUksS0FBSSxDQUFDO0lBRVIsS0FBSyxDQUFDLFVBQVUsQ0FBQyxLQUFpQztRQUN2RCxpREFBaUQ7UUFDakQsTUFBTSxPQUFPLEdBQUcsSUFBSSxXQUFXLEVBQUUsQ0FBQztRQUNsQyxJQUFJLE1BQU0sR0FBRyxFQUFFLENBQUM7UUFDaEIsSUFBSSxjQUFjLEdBQThDLElBQUksQ0FBQztRQUVyRSxnREFBZ0Q7UUFDaEQsTUFBTSxTQUFTLEdBQUcsSUFBSSxlQUFlLENBQXFCO1lBQ3hELEtBQUssQ0FBQyxTQUFTLENBQUMsS0FBSyxFQUFFLFVBQVU7Z0JBQy9CLE1BQU0sSUFBSSxPQUFPLENBQUMsTUFBTSxDQUFDLEtBQUssRUFBRSxFQUFFLE1BQU0sRUFBRSxJQUFJLEVBQUUsQ0FBQyxDQUFDO2dCQUVsRCxzREFBc0Q7Z0JBQ3RELE9BQU8sSUFBSSxFQUFFLENBQUM7b0JBQ1osTUFBTSxZQUFZLEdBQUcsTUFBTSxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQztvQkFDMUMsSUFBSSxZQUFZLEtBQUssQ0FBQyxDQUFDO3dCQUFFLE1BQU07b0JBRS9CLE1BQU0sSUFBSSxHQUFHLE1BQU0sQ0FBQyxLQUFLLENBQUMsQ0FBQyxFQUFFLFlBQVksQ0FBQyxDQUFDO29CQUMzQyxNQUFNLEdBQUcsTUFBTSxDQUFDLEtBQUssQ0FBQyxZQUFZLEdBQUcsQ0FBQyxDQUFDLENBQUM7b0JBRXhDLElBQUksSUFBSSxDQUFDLElBQUksRUFBRSxFQUFFLENBQUM7d0JBQ2hCLElBQUksQ0FBQzs0QkFDSCxNQUFNLE9BQU8sR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFDLElBQUksQ0FBQyxDQUFDOzRCQUNqQyxjQUFjLEdBQUc7Z0NBQ2YsSUFBSSxFQUFFLE9BQU8sQ0FBQyxJQUFJLElBQUksTUFBTTtnQ0FDNUIsT0FBTyxFQUFFLE9BQU8sQ0FBQyxPQUFPLElBQUksRUFBRTs2QkFDL0IsQ0FBQzt3QkFDSixDQUFDO3dCQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7NEJBQ1gsT0FBTyxDQUFDLEtBQUssQ0FBQywwQkFBMEIsRUFBRSxDQUFDLENBQUMsQ0FBQzt3QkFDL0MsQ0FBQztvQkFDSCxDQUFDO2dCQUNILENBQUM7Z0JBRUQsc0RBQXNEO2dCQUN0RCxJQUFJLGNBQWMsRUFBRSxDQUFDO29CQUNuQixNQUFNLE1BQU0sR0FBRyxNQUFNLElBQUksQ0FBQyxrQkFBa0IsQ0FBQyxRQUFRLENBQUMsTUFBTSxDQUFDO3dCQUMzRCxLQUFLLEVBQUUsd0JBQXdCO3dCQUMvQixRQUFRLEVBQUUsQ0FBQyxFQUFFLElBQUksRUFBRSxjQUFjLENBQUMsSUFBSSxFQUFFLE9BQU8sRUFBRSxjQUFjLENBQUMsT0FBTyxFQUFFLENBQUM7d0JBQzFFLE1BQU0sRUFBRSxFQUFFO3dCQUNWLE1BQU0sRUFBRSxJQUFJO3dCQUNaLFVBQVUsRUFBRSxJQUFJO3FCQUNqQixDQUFDLENBQUM7b0JBRUgsb0NBQW9DO29CQUNwQyxJQUFJLEtBQUssRUFBRSxNQUFNLEtBQUssSUFBSSxNQUFNLEVBQUUsQ0FBQzt3QkFDakMsTUFBTSxPQUFPLEdBQUcsS0FBSyxDQUFDLEtBQUssRUFBRSxJQUFJLENBQUM7d0JBQ2xDLElBQUksT0FBTyxFQUFFLENBQUM7NEJBQ1osVUFBVSxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsQ0FBQzt3QkFDOUIsQ0FBQztvQkFDSCxDQUFDO29CQUVELGNBQWMsR0FBRyxJQUFJLENBQUM7Z0JBQ3hCLENBQUM7WUFDSCxDQUFDO1lBRUQsS0FBSyxDQUFDLFVBQVU7Z0JBQ2QsSUFBSSxNQUFNLEVBQUUsQ0FBQztvQkFDWCxJQUFJLENBQUM7d0JBQ0gsTUFBTSxPQUFPLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxNQUFNLENBQUMsQ0FBQzt3QkFDbkMsVUFBVSxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsT0FBTyxJQUFJLEVBQUUsQ0FBQyxDQUFDO29CQUM1QyxDQUFDO29CQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7d0JBQ1gsT0FBTyxDQUFDLEtBQUssQ0FBQyxtQ0FBbUMsRUFBRSxDQUFDLENBQUMsQ0FBQztvQkFDeEQsQ0FBQztnQkFDSCxDQUFDO1lBQ0gsQ0FBQztTQUNGLENBQUMsQ0FBQztRQUVILDRDQUE0QztRQUM1QyxPQUFPLEtBQUssQ0FBQyxXQUFXLENBQUMsU0FBUyxDQUFDLENBQUM7SUFDdEMsQ0FBQztJQUVELGdEQUFnRDtJQUN6QyxLQUFLLENBQUMsSUFBSSxDQUFDLFVBQXVCO1FBQ3ZDLDhDQUE4QztRQUM5QyxNQUFNLFFBQVEsR0FBRyxVQUFVLENBQUMsY0FBYyxDQUFDLEdBQUcsQ0FBQyxHQUFHLENBQUMsRUFBRSxDQUFDLENBQUM7WUFDckQsSUFBSSxFQUFFLEdBQUcsQ0FBQyxJQUFJLEtBQUssV0FBVyxDQUFDLENBQUMsQ0FBQyxXQ
UFvQixDQUFDLENBQUMsQ0FBQyxNQUFlO1lBQ3ZFLE9BQU8sRUFBRSxHQUFHLENBQUMsT0FBTztTQUNyQixDQUFDLENBQUMsQ0FBQztRQUVKLE1BQU0sTUFBTSxHQUFHLE1BQU0sSUFBSSxDQUFDLGtCQUFrQixDQUFDLFFBQVEsQ0FBQyxNQUFNLENBQUM7WUFDM0QsS0FBSyxFQUFFLHdCQUF3QjtZQUMvQixNQUFNLEVBQUUsVUFBVSxDQUFDLGFBQWE7WUFDaEMsUUFBUSxFQUFFO2dCQUNSLEdBQUcsUUFBUTtnQkFDWCxFQUFFLElBQUksRUFBRSxNQUFlLEVBQUUsT0FBTyxFQUFFLFVBQVUsQ0FBQyxXQUFXLEVBQUU7YUFDM0Q7WUFDRCxVQUFVLEVBQUUsSUFBSTtTQUNqQixDQUFDLENBQUM7UUFFSCx5Q0FBeUM7UUFDekMsSUFBSSxPQUFPLEdBQUcsRUFBRSxDQUFDO1FBQ2pCLEtBQUssTUFBTSxLQUFLLElBQUksTUFBTSxDQUFDLE9BQU8sRUFBRSxDQUFDO1lBQ25DLElBQUksTUFBTSxJQUFJLEtBQUssRUFBRSxDQUFDO2dCQUNwQixPQUFPLElBQUksS0FBSyxDQUFDLElBQUksQ0FBQztZQUN4QixDQUFDO1FBQ0gsQ0FBQztRQUVELE9BQU87WUFDTCxJQUFJLEVBQUUsV0FBb0I7WUFDMUIsT0FBTztTQUNSLENBQUM7SUFDSixDQUFDO0lBRU0sS0FBSyxDQUFDLEtBQUssQ0FBQyxVQUErQjtRQUNoRCw4RUFBOEU7UUFDOUUsTUFBTSxJQUFJLEtBQUssQ0FBQyxxREFBcUQsQ0FBQyxDQUFDO0lBQ3pFLENBQUM7Q0FDRiJ9
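
The new `chatStream()` consumes a `ReadableStream<Uint8Array>` of newline-delimited JSON messages (`{ role, content }` per line) and resolves to a `ReadableStream<string>` of text chunks. A sketch of feeding it a single message, reusing the `provider` from the earlier example; the NDJSON framing is inferred from the parsing code in this diff, not documented anywhere else here.

```typescript
const encoder = new TextEncoder();

// One JSON message per line; the transform above splits the buffer on '\n'.
const input = new ReadableStream<Uint8Array>({
  start(controller) {
    controller.enqueue(
      encoder.encode(JSON.stringify({ role: 'user', content: 'Hello there!' }) + '\n'),
    );
    controller.close();
  },
});

const output = await provider.chatStream(input);
const reader = output.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  process.stdout.write(value); // streamed text deltas from the model
}
```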

package/dist_ts/provider.groq.d.ts ADDED
@@ -0,0 +1,18 @@
+import { MultiModalModel } from './abstract.classes.multimodal.js';
+import type { ChatOptions, ChatResponse } from './abstract.classes.multimodal.js';
+export interface IGroqProviderOptions {
+    groqToken: string;
+    model?: string;
+}
+export declare class GroqProvider extends MultiModalModel {
+    private options;
+    private baseUrl;
+    constructor(optionsArg: IGroqProviderOptions);
+    start(): Promise<void>;
+    stop(): Promise<void>;
+    chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>>;
+    chat(optionsArg: ChatOptions): Promise<ChatResponse>;
+    audio(optionsArg: {
+        message: string;
+    }): Promise<NodeJS.ReadableStream>;
+}

package/dist_ts/provider.groq.js ADDED
@@ -0,0 +1,157 @@
+import * as plugins from './plugins.js';
+import * as paths from './paths.js';
+import { MultiModalModel } from './abstract.classes.multimodal.js';
+export class GroqProvider extends MultiModalModel {
+    constructor(optionsArg) {
+        super();
+        this.baseUrl = 'https://api.groq.com/v1';
+        this.options = {
+            ...optionsArg,
+            model: optionsArg.model || 'llama-3.3-70b-versatile', // Default model
+        };
+    }
+    async start() { }
+    async stop() { }
+    async chatStream(input) {
+        // Create a TextDecoder to handle incoming chunks
+        const decoder = new TextDecoder();
+        let buffer = '';
+        let currentMessage = null;
+        // Create a TransformStream to process the input
+        const transform = new TransformStream({
+            async transform(chunk, controller) {
+                buffer += decoder.decode(chunk, { stream: true });
+                // Try to parse complete JSON messages from the buffer
+                while (true) {
+                    const newlineIndex = buffer.indexOf('\n');
+                    if (newlineIndex === -1)
+                        break;
+                    const line = buffer.slice(0, newlineIndex);
+                    buffer = buffer.slice(newlineIndex + 1);
+                    if (line.trim()) {
+                        try {
+                            const message = JSON.parse(line);
+                            currentMessage = {
+                                role: message.role || 'user',
+                                content: message.content || '',
+                            };
+                        }
+                        catch (e) {
+                            console.error('Failed to parse message:', e);
+                        }
+                    }
+                }
+                // If we have a complete message, send it to Groq
+                if (currentMessage) {
+                    const response = await fetch(`${this.baseUrl}/chat/completions`, {
+                        method: 'POST',
+                        headers: {
+                            'Authorization': `Bearer ${this.options.groqToken}`,
+                            'Content-Type': 'application/json',
+                        },
+                        body: JSON.stringify({
+                            model: this.options.model,
+                            messages: [{ role: currentMessage.role, content: currentMessage.content }],
+                            stream: true,
+                        }),
+                    });
+                    // Process each chunk from Groq
+                    const reader = response.body?.getReader();
+                    if (reader) {
+                        try {
+                            while (true) {
+                                const { done, value } = await reader.read();
+                                if (done)
+                                    break;
+                                const chunk = new TextDecoder().decode(value);
+                                const lines = chunk.split('\n');
+                                for (const line of lines) {
+                                    if (line.startsWith('data: ')) {
+                                        const data = line.slice(6);
+                                        if (data === '[DONE]')
+                                            break;
+                                        try {
+                                            const parsed = JSON.parse(data);
+                                            const content = parsed.choices[0]?.delta?.content;
+                                            if (content) {
+                                                controller.enqueue(content);
+                                            }
+                                        }
+                                        catch (e) {
+                                            console.error('Failed to parse SSE data:', e);
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        finally {
+                            reader.releaseLock();
+                        }
+                    }
+                    currentMessage = null;
+                }
+            },
+            flush(controller) {
+                if (buffer) {
+                    try {
+                        const message = JSON.parse(buffer);
+                        controller.enqueue(message.content || '');
+                    }
+                    catch (e) {
+                        console.error('Failed to parse remaining buffer:', e);
+                    }
+                }
+            }
+        });
+        // Connect the input to our transform stream
+        return input.pipeThrough(transform);
+    }
+    // Implementing the synchronous chat interaction
+    async chat(optionsArg) {
+        const messages = [
+            // System message
+            {
+                role: 'system',
+                content: optionsArg.systemMessage,
+            },
+            // Message history
+            ...optionsArg.messageHistory.map(msg => ({
+                role: msg.role,
+                content: msg.content,
+            })),
+            // User message
+            {
+                role: 'user',
+                content: optionsArg.userMessage,
+            },
+        ];
+        const response = await fetch(`${this.baseUrl}/chat/completions`, {
+            method: 'POST',
+            headers: {
+                'Authorization': `Bearer ${this.options.groqToken}`,
+                'Content-Type': 'application/json',
+            },
+            body: JSON.stringify({
+                model: this.options.model,
+                messages,
+                temperature: 0.7,
+                max_completion_tokens: 1024,
+                stream: false,
+            }),
+        });
+        if (!response.ok) {
+            const error = await response.json();
+            throw new Error(`Groq API error: ${error.message || response.statusText}`);
+        }
+        const result = await response.json();
+        return {
+            role: 'assistant',
+            message: result.choices[0].message.content,
+        };
+    }
+    async audio(optionsArg) {
+        // Groq does not provide an audio API, so this method is not implemented.
+        throw new Error('Audio generation is not yet supported by Groq.');
+    }
+}
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJvdmlkZXIuZ3JvcS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3RzL3Byb3ZpZGVyLmdyb3EudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxLQUFLLE9BQU8sTUFBTSxjQUFjLENBQUM7QUFDeEMsT0FBTyxLQUFLLEtBQUssTUFBTSxZQUFZLENBQUM7QUFDcEMsT0FBTyxFQUFFLGVBQWUsRUFBRSxNQUFNLGtDQUFrQyxDQUFDO0FBUW5FLE1BQU0sT0FBTyxZQUFhLFNBQVEsZUFBZTtJQUkvQyxZQUFZLFVBQWdDO1FBQzFDLEtBQUssRUFBRSxDQUFDO1FBSEYsWUFBTyxHQUFHLHlCQUF5QixDQUFDO1FBSTFDLElBQUksQ0FBQyxPQUFPLEdBQUc7WUFDYixHQUFHLFVBQVU7WUFDYixLQUFLLEVBQUUsVUFBVSxDQUFDLEtBQUssSUFBSSx5QkFBeUIsRUFBRSxnQkFBZ0I7U0FDdkUsQ0FBQztJQUNKLENBQUM7SUFFRCxLQUFLLENBQUMsS0FBSyxLQUFJLENBQUM7SUFFaEIsS0FBSyxDQUFDLElBQUksS0FBSSxDQUFDO0lBRVIsS0FBSyxDQUFDLFVBQVUsQ0FBQyxLQUFpQztRQUN2RCxpREFBaUQ7UUFDakQsTUFBTSxPQUFPLEdBQUcsSUFBSSxXQUFXLEVBQUUsQ0FBQztRQUNsQyxJQUFJLE1BQU0sR0FBRyxFQUFFLENBQUM7UUFDaEIsSUFBSSxjQUFjLEdBQThDLElBQUksQ0FBQztRQUVyRSxnREFBZ0Q7UUFDaEQsTUFBTSxTQUFTLEdBQUcsSUFBSSxlQUFlLENBQXFCO1lBQ3hELEtBQUssQ0FBQyxTQUFTLENBQUMsS0FBSyxFQUFFLFVBQVU7Z0JBQy9CLE1BQU0sSUFBSSxPQUFPLENBQUMsTUFBTSxDQUFDLEtBQUssRUFBRSxFQUFFLE1BQU0sRUFBRSxJQUFJLEVBQUUsQ0FBQyxDQUFDO2dCQUVsRCxzREFBc0Q7Z0JBQ3RELE9BQU8sSUFBSSxFQUFFLENBQUM7b0JBQ1osTUFBTSxZQUFZLEdBQUcsTUFBTSxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQztvQkFDMUMsSUFBSSxZQUFZLEtBQUssQ0FBQyxDQUFDO3dCQUFFLE1BQU07b0JBRS9CLE1BQU0sSUFBSSxHQUFHLE1BQU0sQ0FBQyxLQUFLLENBQUMsQ0FBQyxFQUFFLFlBQVksQ0FBQyxDQUFDO29CQUMzQyxNQUFNLEdBQUcsTUFBTSxDQUFDLEtBQUssQ0FBQyxZQUFZLEdBQUcsQ0FBQyxDQUFDLENBQUM7b0JBRXhDLElBQUksSUFBSSxDQUFDLElBQUksRUFBRSxFQUFFLENBQUM7d0JBQ2hCLElBQUksQ0FBQzs0QkFDSCxNQUFNLE9BQU8sR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFDLElBQUksQ0FBQyxDQUFDOzRCQUNqQyxjQUFjLEdBQUc7Z0NBQ2YsSUFBSSxFQUFFLE9BQU8sQ0FBQyxJQUFJLElBQUksTUFBTTtnQ0FDNUIsT0FBTyxFQUFFLE9BQU8sQ0FBQyxPQUFPLElBQUksRUFBRTs2QkFDL0IsQ0FBQzt3QkFDSixDQUFDO3dCQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7NEJBQ1gsT0FBTyxDQUFDLEtBQUssQ0FBQywwQkFBMEIsRUFBRSxDQUFDLENBQUMsQ0FBQzt3QkFDL0MsQ0FBQztvQkFDSCxDQUFDO2dCQUNILENBQUM7Z0JBRUQsaURBQWlEO2dCQUNqRCxJQUFJLGNBQWMsRUFBRSxDQUFDO29CQUNuQixNQUFNLFFBQVEsR0FBRyxNQUFNLEtBQUssQ0FBQyxHQUFHLElBQUksQ0FBQyxPQUFPLG1CQUFtQixFQUFFO3dCQUMvRCxNQUFNLEVBQUUsTUFBTTt3QkFDZCxPQUFPLEVBQUU7NEJBQ1AsZUFBZSxFQUFFLFVBQVUsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUU7NEJBQ25ELGNBQWMsRUFBRSxrQkFBa0I7eUJBQ25DO3dCQUNELElBQUksRUFBRSxJQUFJLENBQUMsU0FBUyxDQUFDOzRCQUNuQixLQUFLLEVBQUUsSUFBSSxDQUFDLE9BQU8sQ0FBQyxLQUFLOzRCQUN6QixRQUFRLEVBQUUsQ0FBQyxFQUFFLElBQUksRUFBRSxjQUFjLENBQUMsSUFBSSxFQUFFLE9BQU8sRUFBRSxjQUFjLENBQUMsT0FBTyxFQUFFLENBQUM7NEJBQzFFLE1BQU0sRUFBRSxJQUFJO3lCQUNiLENBQUM7cUJBQ0gsQ0FBQyxDQUFDO29CQUVILCtCQUErQjtvQkFDL0IsTUFBTSxNQUFNLEdBQUcsUUFBUSxDQUFDLElBQUksRUFBRSxTQUFTLEVBQUUsQ0FBQztvQkFDMUMsSUFBSSxNQUFNLEVBQUUsQ0FBQzt3QkFDWCxJQUFJLENBQUM7NEJBQ0gsT0FBTyxJQUFJLEVBQUUsQ0FBQztnQ0FDWixNQUFNLEVBQUUsSUFBSSxFQUFFLEtBQUssRUFBRSxHQUFHLE1BQU0sTUFBTSxDQUFDLElBQUksRUFBRSxDQUFDO2dDQUM1QyxJQUFJLElBQUk7b0NBQUUsTUFBTTtnQ0FFaEIsTUFBTSxLQUFLLEdBQUcsSUFBSSxXQUFXLEVBQUUsQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLENBQUM7Z0NBQzlDLE1BQU0sS0FBSyxHQUFHLEtBQUssQ0FBQyxLQUFLLENBQUMsSUFBSSxDQUFDLENBQUM7Z0NBRWhDLEtBQUssTUFBTSxJQUFJLElBQUksS0FBSyxFQUFFLENBQUM7b0NBQ3pCLElBQUksSUFBSSxDQUFDLFVBQVUsQ0FBQyxRQUFRLENBQUMsRUFBRSxDQUFDO3dDQUM5QixNQUFNLElBQUksR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxDQUFDO3dDQUMzQixJQUFJLElBQUksS0FBSyxRQUFROzRDQUFFLE1BQU07d0NBRTdCLElBQUksQ0FBQzs0Q0FDSCxNQUFNLE1BQU0sR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFDLElBQUksQ0FBQyxDQUFDOzRDQUNoQyxNQUFNLE9BQU8sR0FBRyxNQUFNLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxFQUFFLEtBQUssRUFBRSxPQUFPLENBQUM7NENBQ2xELElBQUksT0FBTyxFQUFFLENBQUM7Z0RBQ1osVUFBV
SxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsQ0FBQzs0Q0FDOUIsQ0FBQzt3Q0FDSCxDQUFDO3dDQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7NENBQ1gsT0FBTyxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxDQUFDLENBQUMsQ0FBQzt3Q0FDaEQsQ0FBQztvQ0FDSCxDQUFDO2dDQUNILENBQUM7NEJBQ0gsQ0FBQzt3QkFDSCxDQUFDO2dDQUFTLENBQUM7NEJBQ1QsTUFBTSxDQUFDLFdBQVcsRUFBRSxDQUFDO3dCQUN2QixDQUFDO29CQUNILENBQUM7b0JBRUQsY0FBYyxHQUFHLElBQUksQ0FBQztnQkFDeEIsQ0FBQztZQUNILENBQUM7WUFFRCxLQUFLLENBQUMsVUFBVTtnQkFDZCxJQUFJLE1BQU0sRUFBRSxDQUFDO29CQUNYLElBQUksQ0FBQzt3QkFDSCxNQUFNLE9BQU8sR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFDLE1BQU0sQ0FBQyxDQUFDO3dCQUNuQyxVQUFVLENBQUMsT0FBTyxDQUFDLE9BQU8sQ0FBQyxPQUFPLElBQUksRUFBRSxDQUFDLENBQUM7b0JBQzVDLENBQUM7b0JBQUMsT0FBTyxDQUFDLEVBQUUsQ0FBQzt3QkFDWCxPQUFPLENBQUMsS0FBSyxDQUFDLG1DQUFtQyxFQUFFLENBQUMsQ0FBQyxDQUFDO29CQUN4RCxDQUFDO2dCQUNILENBQUM7WUFDSCxDQUFDO1NBQ0YsQ0FBQyxDQUFDO1FBRUgsNENBQTRDO1FBQzVDLE9BQU8sS0FBSyxDQUFDLFdBQVcsQ0FBQyxTQUFTLENBQUMsQ0FBQztJQUN0QyxDQUFDO0lBRUQsZ0RBQWdEO0lBQ3pDLEtBQUssQ0FBQyxJQUFJLENBQUMsVUFBdUI7UUFDdkMsTUFBTSxRQUFRLEdBQUc7WUFDZixpQkFBaUI7WUFDakI7Z0JBQ0UsSUFBSSxFQUFFLFFBQVE7Z0JBQ2QsT0FBTyxFQUFFLFVBQVUsQ0FBQyxhQUFhO2FBQ2xDO1lBQ0Qsa0JBQWtCO1lBQ2xCLEdBQUcsVUFBVSxDQUFDLGNBQWMsQ0FBQyxHQUFHLENBQUMsR0FBRyxDQUFDLEVBQUUsQ0FBQyxDQUFDO2dCQUN2QyxJQUFJLEVBQUUsR0FBRyxDQUFDLElBQUk7Z0JBQ2QsT0FBTyxFQUFFLEdBQUcsQ0FBQyxPQUFPO2FBQ3JCLENBQUMsQ0FBQztZQUNILGVBQWU7WUFDZjtnQkFDRSxJQUFJLEVBQUUsTUFBTTtnQkFDWixPQUFPLEVBQUUsVUFBVSxDQUFDLFdBQVc7YUFDaEM7U0FDRixDQUFDO1FBRUYsTUFBTSxRQUFRLEdBQUcsTUFBTSxLQUFLLENBQUMsR0FBRyxJQUFJLENBQUMsT0FBTyxtQkFBbUIsRUFBRTtZQUMvRCxNQUFNLEVBQUUsTUFBTTtZQUNkLE9BQU8sRUFBRTtnQkFDUCxlQUFlLEVBQUUsVUFBVSxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRTtnQkFDbkQsY0FBYyxFQUFFLGtCQUFrQjthQUNuQztZQUNELElBQUksRUFBRSxJQUFJLENBQUMsU0FBUyxDQUFDO2dCQUNuQixLQUFLLEVBQUUsSUFBSSxDQUFDLE9BQU8sQ0FBQyxLQUFLO2dCQUN6QixRQUFRO2dCQUNSLFdBQVcsRUFBRSxHQUFHO2dCQUNoQixxQkFBcUIsRUFBRSxJQUFJO2dCQUMzQixNQUFNLEVBQUUsS0FBSzthQUNkLENBQUM7U0FDSCxDQUFDLENBQUM7UUFFSCxJQUFJLENBQUMsUUFBUSxDQUFDLEVBQUUsRUFBRSxDQUFDO1lBQ2pCLE1BQU0sS0FBSyxHQUFHLE1BQU0sUUFBUSxDQUFDLElBQUksRUFBRSxDQUFDO1lBQ3BDLE1BQU0sSUFBSSxLQUFLLENBQUMsbUJBQW1CLEtBQUssQ0FBQyxPQUFPLElBQUksUUFBUSxDQUFDLFVBQVUsRUFBRSxDQUFDLENBQUM7UUFDN0UsQ0FBQztRQUVELE1BQU0sTUFBTSxHQUFHLE1BQU0sUUFBUSxDQUFDLElBQUksRUFBRSxDQUFDO1FBRXJDLE9BQU87WUFDTCxJQUFJLEVBQUUsV0FBVztZQUNqQixPQUFPLEVBQUUsTUFBTSxDQUFDLE9BQU8sQ0FBQyxDQUFDLENBQUMsQ0FBQyxPQUFPLENBQUMsT0FBTztTQUMzQyxDQUFDO0lBQ0osQ0FBQztJQUVNLEtBQUssQ0FBQyxLQUFLLENBQUMsVUFBK0I7UUFDaEQseUVBQXlFO1FBQ3pFLE1BQU0sSUFBSSxLQUFLLENBQUMsZ0RBQWdELENBQUMsQ0FBQztJQUNwRSxDQUFDO0NBQ0YifQ==
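
The new Groq provider calls the HTTP chat-completions endpoint directly with `fetch` rather than through an SDK. A hedged construction sketch based on `IGroqProviderOptions`; the default model string matches the constructor above, and the package-root export is an assumption.

```typescript
import { GroqProvider } from '@push.rocks/smartai';

const groq = new GroqProvider({
  groqToken: process.env.GROQ_TOKEN!,  // required by IGroqProviderOptions
  model: 'llama-3.3-70b-versatile',    // optional; matches the default above
});

await groq.start(); // no-op for Groq, kept for interface parity
const answer = await groq.chat({
  systemMessage: 'Answer in one sentence.',
  userMessage: 'What is Groq?',
  messageHistory: [],
});
console.log(answer.message);
```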

package/dist_ts/provider.ollama.d.ts CHANGED
@@ -1,2 +1,19 @@
-
+import { MultiModalModel } from './abstract.classes.multimodal.js';
+import type { ChatOptions, ChatResponse } from './abstract.classes.multimodal.js';
+export interface IOllamaProviderOptions {
+    baseUrl?: string;
+    model?: string;
+}
+export declare class OllamaProvider extends MultiModalModel {
+    private options;
+    private baseUrl;
+    private model;
+    constructor(optionsArg?: IOllamaProviderOptions);
+    start(): Promise<void>;
+    stop(): Promise<void>;
+    chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>>;
+    chat(optionsArg: ChatOptions): Promise<ChatResponse>;
+    audio(optionsArg: {
+        message: string;
+    }): Promise<NodeJS.ReadableStream>;
 }

package/dist_ts/provider.ollama.js CHANGED
@@ -1,4 +1,147 @@
 import * as plugins from './plugins.js';
-
+import * as paths from './paths.js';
+import { MultiModalModel } from './abstract.classes.multimodal.js';
+export class OllamaProvider extends MultiModalModel {
+    constructor(optionsArg = {}) {
+        super();
+        this.options = optionsArg;
+        this.baseUrl = optionsArg.baseUrl || 'http://localhost:11434';
+        this.model = optionsArg.model || 'llama2';
+    }
+    async start() {
+        // Verify Ollama is running
+        try {
+            const response = await fetch(`${this.baseUrl}/api/tags`);
+            if (!response.ok) {
+                throw new Error('Failed to connect to Ollama server');
+            }
+        }
+        catch (error) {
+            throw new Error(`Failed to connect to Ollama server at ${this.baseUrl}: ${error.message}`);
+        }
+    }
+    async stop() { }
+    async chatStream(input) {
+        // Create a TextDecoder to handle incoming chunks
+        const decoder = new TextDecoder();
+        let buffer = '';
+        let currentMessage = null;
+        // Create a TransformStream to process the input
+        const transform = new TransformStream({
+            async transform(chunk, controller) {
+                buffer += decoder.decode(chunk, { stream: true });
+                // Try to parse complete JSON messages from the buffer
+                while (true) {
+                    const newlineIndex = buffer.indexOf('\n');
+                    if (newlineIndex === -1)
+                        break;
+                    const line = buffer.slice(0, newlineIndex);
+                    buffer = buffer.slice(newlineIndex + 1);
+                    if (line.trim()) {
+                        try {
+                            const message = JSON.parse(line);
+                            currentMessage = {
+                                role: message.role || 'user',
+                                content: message.content || '',
+                            };
+                        }
+                        catch (e) {
+                            console.error('Failed to parse message:', e);
+                        }
+                    }
+                }
+                // If we have a complete message, send it to Ollama
+                if (currentMessage) {
+                    const response = await fetch(`${this.baseUrl}/api/chat`, {
+                        method: 'POST',
+                        headers: {
+                            'Content-Type': 'application/json',
+                        },
+                        body: JSON.stringify({
+                            model: this.model,
+                            messages: [{ role: currentMessage.role, content: currentMessage.content }],
+                            stream: true,
+                        }),
+                    });
+                    // Process each chunk from Ollama
+                    const reader = response.body?.getReader();
+                    if (reader) {
+                        try {
+                            while (true) {
+                                const { done, value } = await reader.read();
+                                if (done)
+                                    break;
+                                const chunk = new TextDecoder().decode(value);
+                                const lines = chunk.split('\n');
+                                for (const line of lines) {
+                                    if (line.trim()) {
+                                        try {
+                                            const parsed = JSON.parse(line);
+                                            const content = parsed.message?.content;
+                                            if (content) {
+                                                controller.enqueue(content);
+                                            }
+                                        }
+                                        catch (e) {
+                                            console.error('Failed to parse Ollama response:', e);
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        finally {
+                            reader.releaseLock();
+                        }
+                    }
+                    currentMessage = null;
+                }
+            },
+            flush(controller) {
+                if (buffer) {
+                    try {
+                        const message = JSON.parse(buffer);
+                        controller.enqueue(message.content || '');
+                    }
+                    catch (e) {
+                        console.error('Failed to parse remaining buffer:', e);
+                    }
+                }
+            }
+        });
+        // Connect the input to our transform stream
+        return input.pipeThrough(transform);
+    }
+    // Implementing the synchronous chat interaction
+    async chat(optionsArg) {
+        // Format messages for Ollama
+        const messages = [
+            { role: 'system', content: optionsArg.systemMessage },
+            ...optionsArg.messageHistory,
+            { role: 'user', content: optionsArg.userMessage }
+        ];
+        // Make API call to Ollama
+        const response = await fetch(`${this.baseUrl}/api/chat`, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+            },
+            body: JSON.stringify({
+                model: this.model,
+                messages: messages,
+                stream: false
+            }),
+        });
+        if (!response.ok) {
+            throw new Error(`Ollama API error: ${response.statusText}`);
+        }
+        const result = await response.json();
+        return {
+            role: 'assistant',
+            message: result.message.content,
+        };
+    }
+    async audio(optionsArg) {
+        throw new Error('Audio generation is not supported by Ollama.');
+    }
 }
-//# sourceMappingURL=data:application/json;base64,
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJvdmlkZXIub2xsYW1hLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvcHJvdmlkZXIub2xsYW1hLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxPQUFPLE1BQU0sY0FBYyxDQUFDO0FBQ3hDLE9BQU8sS0FBSyxLQUFLLE1BQU0sWUFBWSxDQUFDO0FBQ3BDLE9BQU8sRUFBRSxlQUFlLEVBQUUsTUFBTSxrQ0FBa0MsQ0FBQztBQVFuRSxNQUFNLE9BQU8sY0FBZSxTQUFRLGVBQWU7SUFLakQsWUFBWSxhQUFxQyxFQUFFO1FBQ2pELEtBQUssRUFBRSxDQUFDO1FBQ1IsSUFBSSxDQUFDLE9BQU8sR0FBRyxVQUFVLENBQUM7UUFDMUIsSUFBSSxDQUFDLE9BQU8sR0FBRyxVQUFVLENBQUMsT0FBTyxJQUFJLHdCQUF3QixDQUFDO1FBQzlELElBQUksQ0FBQyxLQUFLLEdBQUcsVUFBVSxDQUFDLEtBQUssSUFBSSxRQUFRLENBQUM7SUFDNUMsQ0FBQztJQUVELEtBQUssQ0FBQyxLQUFLO1FBQ1QsMkJBQTJCO1FBQzNCLElBQUksQ0FBQztZQUNILE1BQU0sUUFBUSxHQUFHLE1BQU0sS0FBSyxDQUFDLEdBQUcsSUFBSSxDQUFDLE9BQU8sV0FBVyxDQUFDLENBQUM7WUFDekQsSUFBSSxDQUFDLFFBQVEsQ0FBQyxFQUFFLEVBQUUsQ0FBQztnQkFDakIsTUFBTSxJQUFJLEtBQUssQ0FBQyxvQ0FBb0MsQ0FBQyxDQUFDO1lBQ3hELENBQUM7UUFDSCxDQUFDO1FBQUMsT0FBTyxLQUFLLEVBQUUsQ0FBQztZQUNmLE1BQU0sSUFBSSxLQUFLLENBQUMseUNBQXlDLElBQUksQ0FBQyxPQUFPLEtBQUssS0FBSyxDQUFDLE9BQU8sRUFBRSxDQUFDLENBQUM7UUFDN0YsQ0FBQztJQUNILENBQUM7SUFFRCxLQUFLLENBQUMsSUFBSSxLQUFJLENBQUM7SUFFUixLQUFLLENBQUMsVUFBVSxDQUFDLEtBQWlDO1FBQ3ZELGlEQUFpRDtRQUNqRCxNQUFNLE9BQU8sR0FBRyxJQUFJLFdBQVcsRUFBRSxDQUFDO1FBQ2xDLElBQUksTUFBTSxHQUFHLEVBQUUsQ0FBQztRQUNoQixJQUFJLGNBQWMsR0FBOEMsSUFBSSxDQUFDO1FBRXJFLGdEQUFnRDtRQUNoRCxNQUFNLFNBQVMsR0FBRyxJQUFJLGVBQWUsQ0FBcUI7WUFDeEQsS0FBSyxDQUFDLFNBQVMsQ0FBQyxLQUFLLEVBQUUsVUFBVTtnQkFDL0IsTUFBTSxJQUFJLE9BQU8sQ0FBQyxNQUFNLENBQUMsS0FBSyxFQUFFLEVBQUUsTUFBTSxFQUFFLElBQUksRUFBRSxDQUFDLENBQUM7Z0JBRWxELHNEQUFzRDtnQkFDdEQsT0FBTyxJQUFJLEVBQUUsQ0FBQztvQkFDWixNQUFNLFlBQVksR0FBRyxNQUFNLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDO29CQUMxQyxJQUFJLFlBQVksS0FBSyxDQUFDLENBQUM7d0JBQUUsTUFBTTtvQkFFL0IsTUFBTSxJQUFJLEdBQUcsTUFBTSxDQUFDLEtBQUssQ0FBQyxDQUFDLEVBQUUsWUFBWSxDQUFDLENBQUM7b0JBQzNDLE1BQU0sR0FBRyxNQUFNLENBQUMsS0FBSyxDQUFDLFlBQVksR0FBRyxDQUFDLENBQUMsQ0FBQztvQkFFeEMsSUFBSSxJQUFJLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQzt3QkFDaEIsSUFBSSxDQUFDOzRCQUNILE1BQU0sT0FBTyxHQUFHLElBQUksQ0FBQyxLQUFLLENBQUMsSUFBSSxDQUFDLENBQUM7NEJBQ2pDLGNBQWMsR0FBRztnQ0FDZixJQUFJLEVBQUUsT0FBTyxDQUFDLElBQUksSUFBSSxNQUFNO2dDQUM1QixPQUFPLEVBQUUsT0FBTyxDQUFDLE9BQU8sSUFBSSxFQUFFOzZCQUMvQixDQUFDO3dCQUNKLENBQUM7d0JBQUMsT0FBTyxDQUFDLEVBQUUsQ0FBQzs0QkFDWCxPQUFPLENBQUMsS0FBSyxDQUFDLDBCQUEwQixFQUFFLENBQUMsQ0FBQyxDQUFDO3dCQUMvQyxDQUFDO29CQUNILENBQUM7Z0JBQ0gsQ0FBQztnQkFFRCxtREFBbUQ7Z0JBQ25ELElBQUksY0FBYyxFQUFFLENBQUM7b0JBQ25CLE1BQU0sUUFBUSxHQUFHLE1BQU0sS0FBSyxDQUFDLEdBQUcsSUFBSSxDQUFDLE9BQU8sV0FBVyxFQUFFO3dCQUN2RCxNQUFNLEVBQUUsTUFBTTt3QkFDZCxPQUFPLEVBQUU7NEJBQ1AsY0FBYyxFQUFFLGtCQUFrQjt5QkFDbkM7d0JBQ0QsSUFBSSxFQUFFLElBQUksQ0FBQyxTQUFTLENBQUM7NEJBQ25CLEtBQUssRUFBRSxJQUFJLENBQUMsS0FBSzs0QkFDakIsUUFBUSxFQUFFLENBQUMsRUFBRSxJQUFJLEVBQUUsY0FBYyxDQUFDLElBQUksRUFBRSxPQUFPLEVBQUUsY0FBYyxDQUFDLE9BQU8sRUFBRSxDQUFDOzRCQUMxRSxNQUFNLEVBQUUsSUFBSTt5QkFDYixDQUFDO3FCQUNILENBQUMsQ0FBQztvQkFFSCxpQ0FBaUM7b0JBQ2pDLE1BQU0sTUFBTSxHQUFHLFFBQVEsQ0FBQyxJQUFJLEVBQUUsU0FBUyxFQUFFLENBQUM7b0JBQzFDLElBQUksTUFBTSxFQUFFLENBQUM7d0JBQ1gsSUFBSSxDQUFDOzRCQUNILE9BQU8sSUFBSSxFQUFFLENBQUM7Z0NBQ1osTUFBTSxFQUFFLElBQUksRUFBRSxLQUFLLEVBQUUsR0FBRyxNQUFNLE1BQU0sQ0FBQyxJQUFJLEVBQUUsQ0FBQztnQ0FDNUMsSUFBSSxJQUFJO29DQUFFLE1BQU07Z0NBRWhCLE1BQU0sS0FBSyxHQUFHLElBQUksV0FBVyxFQUFFLENBQUMsTUFBTSxDQUFDLEtBQUssQ0FBQyxDQUFDO2dDQUM5QyxNQUFNLEtBQUssR0FBRyxLQUFLLENBQUMsS0FBSyxDQUFDLElBQUksQ0FBQyxDQUFDO2dDQUVoQyxLQUFLLE1BQU0sSUFBSSxJQUFJLEtBQUssRUFBRSxDQUFDO29DQUN6QixJQUFJLElBQUksQ0FBQyxJQUFJLEVBQUUsRUFBRSxDQUFDO3dDQUNoQixJQ
UFJLENBQUM7NENBQ0gsTUFBTSxNQUFNLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxJQUFJLENBQUMsQ0FBQzs0Q0FDaEMsTUFBTSxPQUFPLEdBQUcsTUFBTSxDQUFDLE9BQU8sRUFBRSxPQUFPLENBQUM7NENBQ3hDLElBQUksT0FBTyxFQUFFLENBQUM7Z0RBQ1osVUFBVSxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsQ0FBQzs0Q0FDOUIsQ0FBQzt3Q0FDSCxDQUFDO3dDQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7NENBQ1gsT0FBTyxDQUFDLEtBQUssQ0FBQyxrQ0FBa0MsRUFBRSxDQUFDLENBQUMsQ0FBQzt3Q0FDdkQsQ0FBQztvQ0FDSCxDQUFDO2dDQUNILENBQUM7NEJBQ0gsQ0FBQzt3QkFDSCxDQUFDO2dDQUFTLENBQUM7NEJBQ1QsTUFBTSxDQUFDLFdBQVcsRUFBRSxDQUFDO3dCQUN2QixDQUFDO29CQUNILENBQUM7b0JBRUQsY0FBYyxHQUFHLElBQUksQ0FBQztnQkFDeEIsQ0FBQztZQUNILENBQUM7WUFFRCxLQUFLLENBQUMsVUFBVTtnQkFDZCxJQUFJLE1BQU0sRUFBRSxDQUFDO29CQUNYLElBQUksQ0FBQzt3QkFDSCxNQUFNLE9BQU8sR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFDLE1BQU0sQ0FBQyxDQUFDO3dCQUNuQyxVQUFVLENBQUMsT0FBTyxDQUFDLE9BQU8sQ0FBQyxPQUFPLElBQUksRUFBRSxDQUFDLENBQUM7b0JBQzVDLENBQUM7b0JBQUMsT0FBTyxDQUFDLEVBQUUsQ0FBQzt3QkFDWCxPQUFPLENBQUMsS0FBSyxDQUFDLG1DQUFtQyxFQUFFLENBQUMsQ0FBQyxDQUFDO29CQUN4RCxDQUFDO2dCQUNILENBQUM7WUFDSCxDQUFDO1NBQ0YsQ0FBQyxDQUFDO1FBRUgsNENBQTRDO1FBQzVDLE9BQU8sS0FBSyxDQUFDLFdBQVcsQ0FBQyxTQUFTLENBQUMsQ0FBQztJQUN0QyxDQUFDO0lBRUQsZ0RBQWdEO0lBQ3pDLEtBQUssQ0FBQyxJQUFJLENBQUMsVUFBdUI7UUFDdkMsNkJBQTZCO1FBQzdCLE1BQU0sUUFBUSxHQUFHO1lBQ2YsRUFBRSxJQUFJLEVBQUUsUUFBUSxFQUFFLE9BQU8sRUFBRSxVQUFVLENBQUMsYUFBYSxFQUFFO1lBQ3JELEdBQUcsVUFBVSxDQUFDLGNBQWM7WUFDNUIsRUFBRSxJQUFJLEVBQUUsTUFBTSxFQUFFLE9BQU8sRUFBRSxVQUFVLENBQUMsV0FBVyxFQUFFO1NBQ2xELENBQUM7UUFFRiwwQkFBMEI7UUFDMUIsTUFBTSxRQUFRLEdBQUcsTUFBTSxLQUFLLENBQUMsR0FBRyxJQUFJLENBQUMsT0FBTyxXQUFXLEVBQUU7WUFDdkQsTUFBTSxFQUFFLE1BQU07WUFDZCxPQUFPLEVBQUU7Z0JBQ1AsY0FBYyxFQUFFLGtCQUFrQjthQUNuQztZQUNELElBQUksRUFBRSxJQUFJLENBQUMsU0FBUyxDQUFDO2dCQUNuQixLQUFLLEVBQUUsSUFBSSxDQUFDLEtBQUs7Z0JBQ2pCLFFBQVEsRUFBRSxRQUFRO2dCQUNsQixNQUFNLEVBQUUsS0FBSzthQUNkLENBQUM7U0FDSCxDQUFDLENBQUM7UUFFSCxJQUFJLENBQUMsUUFBUSxDQUFDLEVBQUUsRUFBRSxDQUFDO1lBQ2pCLE1BQU0sSUFBSSxLQUFLLENBQUMscUJBQXFCLFFBQVEsQ0FBQyxVQUFVLEVBQUUsQ0FBQyxDQUFDO1FBQzlELENBQUM7UUFFRCxNQUFNLE1BQU0sR0FBRyxNQUFNLFFBQVEsQ0FBQyxJQUFJLEVBQUUsQ0FBQztRQUVyQyxPQUFPO1lBQ0wsSUFBSSxFQUFFLFdBQW9CO1lBQzFCLE9BQU8sRUFBRSxNQUFNLENBQUMsT0FBTyxDQUFDLE9BQU87U0FDaEMsQ0FBQztJQUNKLENBQUM7SUFFTSxLQUFLLENBQUMsS0FBSyxDQUFDLFVBQStCO1FBQ2hELE1BQU0sSUFBSSxLQUFLLENBQUMsOENBQThDLENBQUMsQ0FBQztJQUNsRSxDQUFDO0NBQ0YifQ==
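
The Ollama provider defaults to a local server at `http://localhost:11434` and the `llama2` model, both overridable through `IOllamaProviderOptions`. A minimal sketch, assuming an Ollama instance is already running locally and that the class is exported from the package root.

```typescript
import { OllamaProvider } from '@push.rocks/smartai';

const ollama = new OllamaProvider({
  baseUrl: 'http://localhost:11434', // default shown in this diff
  model: 'llama2',                   // default shown in this diff
});

await ollama.start(); // probes /api/tags and throws if the server is unreachable
const reply = await ollama.chat({
  systemMessage: 'Be concise.',
  userMessage: 'Say hello.',
  messageHistory: [],
});
console.log(reply.message);
```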

package/dist_ts/provider.openai.d.ts CHANGED
@@ -1,4 +1,3 @@
-/// <reference types="node" resolution-mode="require"/>
 import * as plugins from './plugins.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
 export interface IOpenaiProviderOptions {
@@ -11,7 +10,7 @@ export declare class OpenAiProvider extends MultiModalModel {
     constructor(optionsArg: IOpenaiProviderOptions);
     start(): Promise<void>;
     stop(): Promise<void>;
-    chatStream(input: ReadableStream<
+    chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>>;
     chat(optionsArg: {
         systemMessage: string;
         userMessage: string;