@push.rocks/smartai 0.13.3 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist_ts/00_commitinfo_data.js +3 -3
- package/dist_ts/index.d.ts +6 -11
- package/dist_ts/index.js +6 -12
- package/dist_ts/plugins.d.ts +10 -15
- package/dist_ts/plugins.js +13 -19
- package/dist_ts/smartai.classes.smartai.d.ts +7 -0
- package/dist_ts/smartai.classes.smartai.js +51 -0
- package/dist_ts/smartai.interfaces.d.ts +41 -0
- package/dist_ts/smartai.interfaces.js +2 -0
- package/dist_ts/smartai.middleware.anthropic.d.ts +7 -0
- package/dist_ts/smartai.middleware.anthropic.js +36 -0
- package/dist_ts/smartai.provider.ollama.d.ts +8 -0
- package/dist_ts/smartai.provider.ollama.js +378 -0
- package/dist_ts_audio/index.d.ts +9 -0
- package/dist_ts_audio/index.js +15 -0
- package/dist_ts_audio/plugins.d.ts +2 -0
- package/dist_ts_audio/plugins.js +3 -0
- package/dist_ts_document/index.d.ts +11 -0
- package/dist_ts_document/index.js +45 -0
- package/dist_ts_document/plugins.d.ts +3 -0
- package/dist_ts_document/plugins.js +4 -0
- package/dist_ts_image/index.d.ts +46 -0
- package/dist_ts_image/index.js +110 -0
- package/dist_ts_image/plugins.d.ts +3 -0
- package/dist_ts_image/plugins.js +4 -0
- package/dist_ts_research/index.d.ts +19 -0
- package/dist_ts_research/index.js +98 -0
- package/dist_ts_research/plugins.d.ts +2 -0
- package/dist_ts_research/plugins.js +3 -0
- package/dist_ts_vision/index.d.ts +8 -0
- package/dist_ts_vision/index.js +21 -0
- package/dist_ts_vision/plugins.d.ts +2 -0
- package/dist_ts_vision/plugins.js +3 -0
- package/package.json +50 -22
- package/readme.hints.md +34 -88
- package/readme.md +284 -547
- package/ts/00_commitinfo_data.ts +2 -2
- package/ts/index.ts +8 -11
- package/ts/plugins.ts +19 -35
- package/ts/smartai.classes.smartai.ts +51 -0
- package/ts/smartai.interfaces.ts +53 -0
- package/ts/smartai.middleware.anthropic.ts +38 -0
- package/ts/smartai.provider.ollama.ts +426 -0
- package/ts_audio/index.ts +24 -0
- package/ts_audio/plugins.ts +2 -0
- package/ts_document/index.ts +61 -0
- package/ts_document/plugins.ts +3 -0
- package/ts_image/index.ts +147 -0
- package/ts_image/plugins.ts +3 -0
- package/ts_research/index.ts +120 -0
- package/ts_research/plugins.ts +2 -0
- package/ts_vision/index.ts +29 -0
- package/ts_vision/plugins.ts +2 -0
- package/dist_ts/abstract.classes.multimodal.d.ts +0 -212
- package/dist_ts/abstract.classes.multimodal.js +0 -43
- package/dist_ts/classes.conversation.d.ts +0 -31
- package/dist_ts/classes.conversation.js +0 -150
- package/dist_ts/classes.smartai.d.ts +0 -59
- package/dist_ts/classes.smartai.js +0 -139
- package/dist_ts/classes.tts.d.ts +0 -6
- package/dist_ts/classes.tts.js +0 -10
- package/dist_ts/interfaces.d.ts +0 -1
- package/dist_ts/interfaces.js +0 -2
- package/dist_ts/paths.d.ts +0 -2
- package/dist_ts/paths.js +0 -4
- package/dist_ts/provider.anthropic.d.ts +0 -48
- package/dist_ts/provider.anthropic.js +0 -369
- package/dist_ts/provider.elevenlabs.d.ts +0 -43
- package/dist_ts/provider.elevenlabs.js +0 -64
- package/dist_ts/provider.exo.d.ts +0 -40
- package/dist_ts/provider.exo.js +0 -116
- package/dist_ts/provider.groq.d.ts +0 -39
- package/dist_ts/provider.groq.js +0 -178
- package/dist_ts/provider.mistral.d.ts +0 -61
- package/dist_ts/provider.mistral.js +0 -288
- package/dist_ts/provider.ollama.d.ts +0 -141
- package/dist_ts/provider.ollama.js +0 -529
- package/dist_ts/provider.openai.d.ts +0 -62
- package/dist_ts/provider.openai.js +0 -403
- package/dist_ts/provider.perplexity.d.ts +0 -37
- package/dist_ts/provider.perplexity.js +0 -215
- package/dist_ts/provider.xai.d.ts +0 -52
- package/dist_ts/provider.xai.js +0 -160
- package/ts/abstract.classes.multimodal.ts +0 -240
- package/ts/classes.conversation.ts +0 -176
- package/ts/classes.smartai.ts +0 -187
- package/ts/classes.tts.ts +0 -15
- package/ts/interfaces.ts +0 -0
- package/ts/paths.ts +0 -4
- package/ts/provider.anthropic.ts +0 -446
- package/ts/provider.elevenlabs.ts +0 -116
- package/ts/provider.exo.ts +0 -155
- package/ts/provider.groq.ts +0 -219
- package/ts/provider.mistral.ts +0 -352
- package/ts/provider.ollama.ts +0 -705
- package/ts/provider.openai.ts +0 -462
- package/ts/provider.perplexity.ts +0 -259
- package/ts/provider.xai.ts +0 -214
package/ts/provider.mistral.ts
DELETED
|
@@ -1,352 +0,0 @@
|
|
|
1
|
-
import * as plugins from './plugins.js';
|
|
2
|
-
import { MultiModalModel } from './abstract.classes.multimodal.js';
|
|
3
|
-
import type {
|
|
4
|
-
ChatOptions,
|
|
5
|
-
ChatResponse,
|
|
6
|
-
ChatMessage,
|
|
7
|
-
ResearchOptions,
|
|
8
|
-
ResearchResponse,
|
|
9
|
-
ImageGenerateOptions,
|
|
10
|
-
ImageEditOptions,
|
|
11
|
-
ImageResponse
|
|
12
|
-
} from './abstract.classes.multimodal.js';
|
|
13
|
-
|
|
14
|
-
/**
 * Configuration for {@link MistralProvider}.
 */
export interface IMistralProviderOptions {
  /** API token used to authenticate the Mistral SDK client (passed as `apiKey` in `start()`). */
  mistralToken: string;
  /** Chat completion model id; falls back to 'mistral-large-latest' when omitted. */
  chatModel?: string; // default: 'mistral-large-latest'
  /** OCR model id used by `vision()` and `document()`; falls back to 'mistral-ocr-latest' when omitted. */
  ocrModel?: string; // default: 'mistral-ocr-latest'
  /** Table output format requested from the OCR API in `document()`; falls back to 'markdown' when omitted. */
  tableFormat?: 'markdown' | 'html';
}
|
|
20
|
-
|
|
21
|
-
/**
 * Multi-modal provider backed by the Mistral platform.
 *
 * Supported capabilities: chat (sync + streaming), vision and document
 * processing (both via Mistral's OCR API). Audio, research/web search and
 * image generation/editing are explicitly unsupported and throw.
 */
export class MistralProvider extends MultiModalModel {
  private options: IMistralProviderOptions;
  // SDK client; only assigned in start(), so it is undefined until start() has run.
  public mistralClient: plugins.mistralai.Mistral;

  constructor(optionsArg: IMistralProviderOptions) {
    super();
    this.options = optionsArg;
  }

  /**
   * Initializes the provider: runs base-class startup, then constructs the
   * Mistral SDK client with the configured token.
   */
  async start() {
    await super.start();
    this.mistralClient = new plugins.mistralai.Mistral({
      apiKey: this.options.mistralToken,
    });
  }

  /** Shuts the provider down; delegates entirely to the base class. */
  async stop() {
    await super.stop();
  }

  /**
   * Synchronous chat interaction using Mistral's chat API.
   *
   * Builds a flat message list (optional system message, then history, then
   * the current user message), calls `chat.complete`, and flattens the first
   * choice's content — which the SDK may return either as a plain string or
   * as an array of content chunks — into a single string.
   *
   * @param optionsArg - systemMessage, userMessage and prior messageHistory.
   * @returns An assistant-role ChatResponse whose `message` is the flattened
   *          text ('' when the response carries no content).
   */
  public async chat(optionsArg: ChatOptions): Promise<ChatResponse> {
    // Convert message history to Mistral format
    const messages: Array<{
      role: 'system' | 'user' | 'assistant';
      content: string;
    }> = [];

    // Add system message first
    if (optionsArg.systemMessage) {
      messages.push({
        role: 'system',
        content: optionsArg.systemMessage
      });
    }

    // Add message history; any role other than 'system'/'assistant' is coerced to 'user'.
    for (const msg of optionsArg.messageHistory) {
      messages.push({
        role: msg.role === 'system' ? 'system' : msg.role === 'assistant' ? 'assistant' : 'user',
        content: msg.content
      });
    }

    // Add current user message
    messages.push({
      role: 'user',
      content: optionsArg.userMessage
    });

    const result = await this.mistralClient.chat.complete({
      model: this.options.chatModel || 'mistral-large-latest',
      messages: messages,
    });

    // Extract content from response
    const choice = result.choices?.[0];
    let content = '';

    if (choice?.message?.content) {
      if (typeof choice.message.content === 'string') {
        content = choice.message.content;
      } else if (Array.isArray(choice.message.content)) {
        // Handle array of content chunks: keep strings and objects exposing
        // a 'text' field; anything else contributes an empty string.
        content = choice.message.content
          .map((chunk: any) => {
            if (typeof chunk === 'string') return chunk;
            if (chunk && typeof chunk === 'object' && 'text' in chunk) return chunk.text;
            return '';
          })
          .join('');
      }
    }

    return {
      role: 'assistant',
      message: content,
    };
  }

  /**
   * Streaming chat using Mistral's streaming API.
   *
   * The input stream is expected to carry newline-delimited JSON request
   * messages (objects with `role`, `content` and optional `systemMessage` —
   * inferred from the fields read below; confirm against the caller). Each
   * complete line triggers a `chat.stream` call whose text deltas are
   * forwarded on the returned string stream.
   *
   * @param input - Byte stream of newline-delimited JSON request messages.
   * @returns A text stream of model output deltas.
   */
  public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
    const decoder = new TextDecoder();
    let buffer = '';
    // Captured locally because `this` is not available inside the
    // TransformStream callbacks below.
    const mistralClient = this.mistralClient;
    const chatModel = this.options.chatModel || 'mistral-large-latest';

    const transform = new TransformStream<Uint8Array, string>({
      async transform(chunk, controller) {
        buffer += decoder.decode(chunk, { stream: true });

        // Try to parse complete JSON messages from the buffer
        while (true) {
          const newlineIndex = buffer.indexOf('\n');
          if (newlineIndex === -1) break;

          const line = buffer.slice(0, newlineIndex);
          buffer = buffer.slice(newlineIndex + 1);

          if (line.trim()) {
            try {
              const message = JSON.parse(line);

              // Build messages array
              const messages: Array<{
                role: 'system' | 'user' | 'assistant';
                content: string;
              }> = [];

              if (message.systemMessage) {
                messages.push({
                  role: 'system',
                  content: message.systemMessage
                });
              }

              messages.push({
                role: message.role === 'assistant' ? 'assistant' : 'user',
                content: message.content
              });

              // Use Mistral streaming
              const stream = await mistralClient.chat.stream({
                model: chatModel,
                messages: messages,
              });

              // Process streaming events; deltas may be a string or an array
              // of chunks, mirroring the shape handled in chat().
              for await (const event of stream) {
                const delta = event.data?.choices?.[0]?.delta;
                if (delta?.content) {
                  if (typeof delta.content === 'string') {
                    controller.enqueue(delta.content);
                  } else if (Array.isArray(delta.content)) {
                    for (const chunk of delta.content) {
                      if (typeof chunk === 'string') {
                        controller.enqueue(chunk);
                      } else if (chunk && typeof chunk === 'object' && 'text' in chunk) {
                        controller.enqueue((chunk as any).text);
                      }
                    }
                  }
                }
              }
            } catch (e) {
              // Malformed line: log and keep consuming subsequent lines.
              console.error('Failed to parse message:', e);
            }
          }
        }
      },

      flush(controller) {
        // NOTE(review): unlike transform(), a trailing unterminated line is
        // echoed verbatim (its `content` field) WITHOUT a model call —
        // confirm this asymmetry is intentional.
        if (buffer.trim()) {
          try {
            const message = JSON.parse(buffer);
            controller.enqueue(message.content || '');
          } catch (e) {
            console.error('Failed to parse remaining buffer:', e);
          }
        }
      }
    });

    return input.pipeThrough(transform);
  }

  /**
   * Audio generation is not supported by Mistral.
   * @throws Error always; use an audio-capable provider instead.
   */
  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
    throw new Error('Audio generation is not supported by Mistral. Please use ElevenLabs or OpenAI provider for audio generation.');
  }

  /**
   * Vision via Mistral's OCR API.
   *
   * Sniffs the image MIME type from the buffer's magic bytes, runs OCR on a
   * base64 data URL, and joins the per-page markdown. When a non-blank
   * prompt is given, the extracted text is additionally passed through
   * chat() for analysis and that answer is returned instead.
   *
   * @param optionsArg - image buffer and an optional analysis prompt.
   * @returns The OCR markdown, or the chat analysis when a prompt was given.
   */
  public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
    const base64Image = optionsArg.image.toString('base64');

    // Detect image type from buffer header; JPEG is the fallback.
    let mimeType = 'image/jpeg';
    if (optionsArg.image[0] === 0x89 && optionsArg.image[1] === 0x50) {
      mimeType = 'image/png';
    } else if (optionsArg.image[0] === 0x47 && optionsArg.image[1] === 0x49) {
      mimeType = 'image/gif';
    } else if (optionsArg.image[0] === 0x52 && optionsArg.image[1] === 0x49) {
      // NOTE(review): 0x52 0x49 ('RI') is the generic RIFF prefix, which also
      // matches WAV/AVI, not only WebP — confirm whether bytes 8-11 ('WEBP')
      // should be checked as well.
      mimeType = 'image/webp';
    }

    // Use OCR API with image data URL
    const ocrResult = await this.mistralClient.ocr.process({
      model: this.options.ocrModel || 'mistral-ocr-latest',
      document: {
        imageUrl: `data:${mimeType};base64,${base64Image}`,
        type: 'image_url',
      },
    });

    // Combine markdown from all pages
    const extractedText = ocrResult.pages.map(page => page.markdown).join('\n\n');

    // If a prompt is provided, use chat to analyze the extracted text
    if (optionsArg.prompt && optionsArg.prompt.trim()) {
      const chatResponse = await this.chat({
        systemMessage: 'You are an assistant analyzing image content. The following is text extracted from an image using OCR.',
        userMessage: `${optionsArg.prompt}\n\nExtracted content:\n${extractedText}`,
        messageHistory: [],
      });
      return chatResponse.message;
    }

    return extractedText;
  }

  /**
   * Document processing using Mistral's OCR API.
   *
   * Each PDF is uploaded via the Files API (purpose 'ocr'), OCR-processed,
   * and rendered to annotated markdown (page separators, tables,
   * header/footer). The combined text is then fed to chat() together with
   * the caller's query. Uploaded files are deleted in a finally block, so
   * cleanup runs even when OCR or chat fails; cleanup failures are only
   * warned about.
   *
   * @param optionsArg - system/user messages, raw PDF buffers, and history.
   * @returns An assistant message object wrapping the chat answer.
   */
  public async document(optionsArg: {
    systemMessage: string;
    userMessage: string;
    pdfDocuments: Uint8Array[];
    messageHistory: ChatMessage[];
  }): Promise<{ message: any }> {
    const extractedTexts: string[] = [];
    const uploadedFileIds: string[] = [];

    try {
      // Process each PDF document using Mistral OCR
      for (let i = 0; i < optionsArg.pdfDocuments.length; i++) {
        const pdfDocument = optionsArg.pdfDocuments[i];

        // Upload the PDF to Mistral's Files API first
        const uploadResult = await this.mistralClient.files.upload({
          file: {
            fileName: `document_${i + 1}.pdf`,
            content: pdfDocument,
          },
          purpose: 'ocr',
        });

        // Remember the id so the finally block can delete it.
        uploadedFileIds.push(uploadResult.id);

        // Now use OCR with the uploaded file
        const ocrResult = await this.mistralClient.ocr.process({
          model: this.options.ocrModel || 'mistral-ocr-latest',
          document: {
            type: 'file',
            fileId: uploadResult.id,
          },
          tableFormat: this.options.tableFormat || 'markdown',
        });

        // Combine all page markdown with page separators
        const pageTexts = ocrResult.pages.map((page, index) => {
          let pageContent = `--- Page ${index + 1} ---\n${page.markdown}`;

          // Include tables if present
          if (page.tables && page.tables.length > 0) {
            pageContent += '\n\n**Tables:**\n' + page.tables.map((t: any) => t.markdown || t.html || '').join('\n');
          }

          // Include header/footer if present
          if (page.header) {
            pageContent = `Header: ${page.header}\n${pageContent}`;
          }
          if (page.footer) {
            pageContent += `\nFooter: ${page.footer}`;
          }

          return pageContent;
        }).join('\n\n');

        extractedTexts.push(pageTexts);
      }

      // Combine all document texts; multi-document input gets per-document headers.
      const allDocumentText = extractedTexts.length === 1
        ? extractedTexts[0]
        : extractedTexts.map((text, i) => `=== Document ${i + 1} ===\n${text}`).join('\n\n');

      // Use chat API to process the extracted text with the user's query
      const chatResponse = await this.chat({
        systemMessage: optionsArg.systemMessage || 'You are a helpful assistant analyzing document content.',
        userMessage: `${optionsArg.userMessage}\n\n---\nDocument Content:\n${allDocumentText}`,
        messageHistory: optionsArg.messageHistory,
      });

      return {
        message: {
          role: 'assistant',
          content: chatResponse.message
        }
      };
    } finally {
      // Clean up uploaded files
      for (const fileId of uploadedFileIds) {
        try {
          await this.mistralClient.files.delete({ fileId });
        } catch (cleanupError) {
          // Ignore cleanup errors - files may have already been auto-deleted
          console.warn(`Failed to delete temporary file ${fileId}:`, cleanupError);
        }
      }
    }
  }

  /**
   * Research is not natively supported by Mistral.
   * @throws Error always; use a research-capable provider instead.
   */
  public async research(optionsArg: ResearchOptions): Promise<ResearchResponse> {
    throw new Error('Research/web search is not supported by Mistral. Please use Perplexity or Anthropic provider for research capabilities.');
  }

  /**
   * Image generation is not supported by Mistral.
   * @throws Error always; use an image-capable provider instead.
   */
  public async imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse> {
    throw new Error('Image generation is not supported by Mistral. Please use OpenAI provider for image generation.');
  }

  /**
   * Image editing is not supported by Mistral.
   * @throws Error always; use an image-capable provider instead.
   */
  public async imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse> {
    throw new Error('Image editing is not supported by Mistral. Please use OpenAI provider for image editing.');
  }
}
|