@push.rocks/smartai 0.5.11 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist_ts/00_commitinfo_data.js +2 -2
- package/dist_ts/abstract.classes.multimodal.d.ts +95 -0
- package/dist_ts/abstract.classes.multimodal.js +1 -1
- package/dist_ts/index.d.ts +6 -0
- package/dist_ts/index.js +7 -1
- package/dist_ts/provider.anthropic.d.ts +13 -1
- package/dist_ts/provider.anthropic.js +141 -5
- package/dist_ts/provider.exo.d.ts +10 -1
- package/dist_ts/provider.exo.js +16 -1
- package/dist_ts/provider.groq.d.ts +10 -1
- package/dist_ts/provider.groq.js +16 -1
- package/dist_ts/provider.ollama.d.ts +10 -1
- package/dist_ts/provider.ollama.js +16 -1
- package/dist_ts/provider.openai.d.ts +13 -0
- package/dist_ts/provider.openai.js +218 -1
- package/dist_ts/provider.perplexity.d.ts +10 -1
- package/dist_ts/provider.perplexity.js +72 -1
- package/dist_ts/provider.xai.d.ts +10 -0
- package/dist_ts/provider.xai.js +16 -1
- package/package.json +2 -1
- package/readme.md +56 -9
- package/ts/00_commitinfo_data.ts +1 -1
- package/ts/abstract.classes.multimodal.ts +103 -0
- package/ts/index.ts +6 -0
- package/ts/provider.anthropic.ts +168 -5
- package/ts/provider.exo.ts +28 -1
- package/ts/provider.groq.ts +28 -1
- package/ts/provider.ollama.ts +28 -1
- package/ts/provider.openai.ts +224 -1
- package/ts/provider.perplexity.ts +89 -1
- package/ts/provider.xai.ts +28 -1
package/ts/abstract.classes.multimodal.ts
CHANGED

@@ -25,6 +25,85 @@ export interface ChatResponse {
   message: string;
 }
 
+/**
+ * Options for research interactions
+ */
+export interface ResearchOptions {
+  query: string;
+  searchDepth?: 'basic' | 'advanced' | 'deep';
+  maxSources?: number;
+  includeWebSearch?: boolean;
+  background?: boolean;
+}
+
+/**
+ * Response format for research interactions
+ */
+export interface ResearchResponse {
+  answer: string;
+  sources: Array<{
+    url: string;
+    title: string;
+    snippet: string;
+  }>;
+  searchQueries?: string[];
+  metadata?: any;
+}
+
+/**
+ * Options for image generation
+ */
+export interface ImageGenerateOptions {
+  prompt: string;
+  model?: 'gpt-image-1' | 'dall-e-3' | 'dall-e-2';
+  quality?: 'low' | 'medium' | 'high' | 'standard' | 'hd' | 'auto';
+  size?: '256x256' | '512x512' | '1024x1024' | '1536x1024' | '1024x1536' | '1792x1024' | '1024x1792' | 'auto';
+  style?: 'vivid' | 'natural';
+  background?: 'transparent' | 'opaque' | 'auto';
+  outputFormat?: 'png' | 'jpeg' | 'webp';
+  outputCompression?: number; // 0-100 for webp/jpeg
+  moderation?: 'low' | 'auto';
+  n?: number; // Number of images to generate
+  stream?: boolean;
+  partialImages?: number; // 0-3 for streaming
+}
+
+/**
+ * Options for image editing
+ */
+export interface ImageEditOptions {
+  image: Buffer;
+  prompt: string;
+  mask?: Buffer;
+  model?: 'gpt-image-1' | 'dall-e-2';
+  quality?: 'low' | 'medium' | 'high' | 'standard' | 'auto';
+  size?: '256x256' | '512x512' | '1024x1024' | '1536x1024' | '1024x1536' | 'auto';
+  background?: 'transparent' | 'opaque' | 'auto';
+  outputFormat?: 'png' | 'jpeg' | 'webp';
+  outputCompression?: number;
+  n?: number;
+  stream?: boolean;
+  partialImages?: number;
+}
+
+/**
+ * Response format for image operations
+ */
+export interface ImageResponse {
+  images: Array<{
+    b64_json?: string;
+    url?: string;
+    revisedPrompt?: string;
+  }>;
+  metadata?: {
+    model: string;
+    quality?: string;
+    size?: string;
+    outputFormat?: string;
+    tokensUsed?: number;
+  };
+}
+
 /**
  * Abstract base class for multi-modal AI models.
  * Provides a common interface for different AI providers (OpenAI, Anthropic, Perplexity, Ollama)

@@ -98,4 +177,28 @@ export abstract class MultiModalModel {
     pdfDocuments: Uint8Array[];
     messageHistory: ChatMessage[];
   }): Promise<{ message: any }>;
+
+  /**
+   * Research and web search capabilities
+   * @param optionsArg Options containing the research query and configuration
+   * @returns Promise resolving to the research results with sources
+   * @throws Error if the provider doesn't support research capabilities
+   */
+  public abstract research(optionsArg: ResearchOptions): Promise<ResearchResponse>;
+
+  /**
+   * Image generation from text prompts
+   * @param optionsArg Options containing the prompt and generation parameters
+   * @returns Promise resolving to the generated image(s)
+   * @throws Error if the provider doesn't support image generation
+   */
+  public abstract imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse>;
+
+  /**
+   * Image editing and inpainting
+   * @param optionsArg Options containing the image, prompt, and editing parameters
+   * @returns Promise resolving to the edited image(s)
+   * @throws Error if the provider doesn't support image editing
+   */
+  public abstract imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse>;
 }
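Taken together, the two hunks above define the option and response types every provider must now accept. For orientation, a minimal sketch of how the new types compose; the values are illustrative only, and importing them from the package root assumes the root export re-exports these types (the index.ts change further down suggests it does):

import type { ResearchOptions, ImageGenerateOptions } from '@push.rocks/smartai';

// Illustrative values only; field names and unions come from the interfaces above.
const researchOpts: ResearchOptions = {
  query: 'Current state of WebGPU support across browsers',
  searchDepth: 'advanced',
  maxSources: 5,
};

const imageOpts: ImageGenerateOptions = {
  prompt: 'Isometric illustration of a small harbor town',
  model: 'gpt-image-1',
  quality: 'high',
  size: '1024x1024',
  outputFormat: 'webp',
  outputCompression: 80,
};
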
package/ts/index.ts
CHANGED
@@ -1,3 +1,9 @@
 export * from './classes.smartai.js';
 export * from './abstract.classes.multimodal.js';
 export * from './provider.openai.js';
+export * from './provider.anthropic.js';
+export * from './provider.perplexity.js';
+export * from './provider.groq.js';
+export * from './provider.ollama.js';
+export * from './provider.xai.js';
+export * from './provider.exo.js';

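With these re-exports, the provider classes should be importable directly from the package root instead of deep paths; a brief sketch, using only class names that appear in the provider files below:

import {
  AnthropicProvider,
  GroqProvider,
  OllamaProvider,
  ExoProvider,
} from '@push.rocks/smartai';
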
package/ts/provider.anthropic.ts
CHANGED
@@ -1,13 +1,25 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
-import type {
+import type {
+  ChatOptions,
+  ChatResponse,
+  ChatMessage,
+  ResearchOptions,
+  ResearchResponse,
+  ImageGenerateOptions,
+  ImageEditOptions,
+  ImageResponse
+} from './abstract.classes.multimodal.js';
 import type { ImageBlockParam, TextBlockParam } from '@anthropic-ai/sdk/resources/messages';
 
 type ContentBlock = ImageBlockParam | TextBlockParam;
 
 export interface IAnthropicProviderOptions {
   anthropicToken: string;
+  enableWebSearch?: boolean;
+  searchDomainAllowList?: string[];
+  searchDomainBlockList?: string[];
 }
 
 export class AnthropicProvider extends MultiModalModel {

@@ -65,7 +77,7 @@ export class AnthropicProvider extends MultiModalModel {
     // If we have a complete message, send it to Anthropic
     if (currentMessage) {
       const stream = await this.anthropicApiClient.messages.create({
-        model: 'claude-
+        model: 'claude-sonnet-4-5-20250929',
         messages: [{ role: currentMessage.role, content: currentMessage.content }],
         system: '',
         stream: true,

@@ -109,7 +121,7 @@ export class AnthropicProvider extends MultiModalModel {
     }));
 
     const result = await this.anthropicApiClient.messages.create({
-      model: 'claude-
+      model: 'claude-sonnet-4-5-20250929',
       system: optionsArg.systemMessage,
       messages: [
         ...messages,

@@ -156,7 +168,7 @@ export class AnthropicProvider extends MultiModalModel {
     ];
 
     const result = await this.anthropicApiClient.messages.create({
-      model: 'claude-
+      model: 'claude-sonnet-4-5-20250929',
       messages: [{
         role: 'user',
         content

@@ -215,7 +227,7 @@ export class AnthropicProvider extends MultiModalModel {
     }
 
     const result = await this.anthropicApiClient.messages.create({
-      model: 'claude-
+      model: 'claude-sonnet-4-5-20250929',
       system: optionsArg.systemMessage,
       messages: [
         ...messages,

@@ -239,4 +251,155 @@ export class AnthropicProvider extends MultiModalModel {
       }
     };
   }
+
+  public async research(optionsArg: ResearchOptions): Promise<ResearchResponse> {
+    // Prepare the messages for the research request
+    const systemMessage = `You are a research assistant with web search capabilities.
+    Provide comprehensive, well-researched answers with citations and sources.
+    When searching the web, be thorough and cite your sources accurately.`;
+
+    try {
+      // Build the tool configuration for web search
+      const tools: any[] = [];
+
+      if (this.options.enableWebSearch) {
+        const webSearchTool: any = {
+          type: 'web_search_20250305',
+          name: 'web_search'
+        };
+
+        // Add optional parameters
+        if (optionsArg.maxSources) {
+          webSearchTool.max_uses = optionsArg.maxSources;
+        }
+
+        if (this.options.searchDomainAllowList?.length) {
+          webSearchTool.allowed_domains = this.options.searchDomainAllowList;
+        } else if (this.options.searchDomainBlockList?.length) {
+          webSearchTool.blocked_domains = this.options.searchDomainBlockList;
+        }
+
+        tools.push(webSearchTool);
+      }
+
+      // Configure the request based on search depth
+      const maxTokens = optionsArg.searchDepth === 'deep' ? 8192 :
+                        optionsArg.searchDepth === 'advanced' ? 6144 : 4096;
+
+      // Create the research request
+      const requestParams: any = {
+        model: 'claude-sonnet-4-5-20250929',
+        system: systemMessage,
+        messages: [
+          {
+            role: 'user' as const,
+            content: optionsArg.query
+          }
+        ],
+        max_tokens: maxTokens,
+        temperature: 0.7
+      };
+
+      // Add tools if web search is enabled
+      if (tools.length > 0) {
+        requestParams.tools = tools;
+      }
+
+      // Execute the research request
+      const result = await this.anthropicApiClient.messages.create(requestParams);
+
+      // Extract the answer from content blocks
+      let answer = '';
+      const sources: Array<{ url: string; title: string; snippet: string }> = [];
+      const searchQueries: string[] = [];
+
+      // Process content blocks
+      for (const block of result.content) {
+        if ('text' in block) {
+          // Accumulate text content
+          answer += block.text;
+
+          // Extract citations if present
+          if ('citations' in block && Array.isArray(block.citations)) {
+            for (const citation of block.citations) {
+              if (citation.type === 'web_search_result_location') {
+                sources.push({
+                  title: citation.title || '',
+                  url: citation.url || '',
+                  snippet: citation.cited_text || ''
+                });
+              }
+            }
+          }
+        } else if ('type' in block && block.type === 'server_tool_use') {
+          // Extract search queries from server tool use
+          if (block.name === 'web_search' && block.input && typeof block.input === 'object' && 'query' in block.input) {
+            searchQueries.push((block.input as any).query);
+          }
+        } else if ('type' in block && block.type === 'web_search_tool_result') {
+          // Extract sources from web search results
+          if (Array.isArray(block.content)) {
+            for (const result of block.content) {
+              if (result.type === 'web_search_result') {
+                // Only add if not already in sources (avoid duplicates from citations)
+                if (!sources.some(s => s.url === result.url)) {
+                  sources.push({
+                    title: result.title || '',
+                    url: result.url || '',
+                    snippet: '' // Search results don't include snippets, only citations do
+                  });
+                }
+              }
+            }
+          }
+        }
+      }
+
+      // Fallback: Parse markdown-style links if no citations found
+      if (sources.length === 0) {
+        const urlRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
+        let match: RegExpExecArray | null;
+
+        while ((match = urlRegex.exec(answer)) !== null) {
+          sources.push({
+            title: match[1],
+            url: match[2],
+            snippet: ''
+          });
+        }
+      }
+
+      // Check if web search was used based on usage info
+      const webSearchCount = result.usage?.server_tool_use?.web_search_requests || 0;
+
+      return {
+        answer,
+        sources,
+        searchQueries: searchQueries.length > 0 ? searchQueries : undefined,
+        metadata: {
+          model: 'claude-sonnet-4-5-20250929',
+          searchDepth: optionsArg.searchDepth || 'basic',
+          tokensUsed: result.usage?.output_tokens,
+          webSearchesPerformed: webSearchCount
+        }
+      };
+    } catch (error) {
+      console.error('Anthropic research error:', error);
+      throw new Error(`Failed to perform research: ${error.message}`);
+    }
+  }
+
+  /**
+   * Image generation is not supported by Anthropic
+   */
+  public async imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse> {
+    throw new Error('Image generation is not supported by Anthropic. Claude can only analyze images, not generate them. Please use OpenAI provider for image generation.');
+  }
+
+  /**
+   * Image editing is not supported by Anthropic
+   */
+  public async imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse> {
+    throw new Error('Image editing is not supported by Anthropic. Claude can only analyze images, not edit them. Please use OpenAI provider for image editing.');
+  }
 }
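A hedged usage sketch for the new Anthropic research() path. It assumes the constructor accepts the IAnthropicProviderOptions object shown in the first hunk and that the provider is initialized with an async start() like the rest of the library; neither call site appears in this diff, and the API key and domain list are placeholders:

import { AnthropicProvider } from '@push.rocks/smartai';

const anthropic = new AnthropicProvider({
  anthropicToken: process.env.ANTHROPIC_API_KEY ?? '', // placeholder
  enableWebSearch: true,
  searchDomainAllowList: ['developer.mozilla.org', 'nodejs.org'],
});
await anthropic.start(); // assumption: providers expose an async start()

const result = await anthropic.research({
  query: 'What changed in the Fetch API in recent Node.js releases?',
  searchDepth: 'advanced',
  maxSources: 5,
});

console.log(result.answer);
for (const source of result.sources) {
  console.log(`${source.title}: ${source.url}`);
}
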
package/ts/provider.exo.ts
CHANGED
@@ -1,7 +1,16 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
-import type {
+import type {
+  ChatOptions,
+  ChatResponse,
+  ChatMessage,
+  ResearchOptions,
+  ResearchResponse,
+  ImageGenerateOptions,
+  ImageEditOptions,
+  ImageResponse
+} from './abstract.classes.multimodal.js';
 import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions';
 
 export interface IExoProviderOptions {

@@ -125,4 +134,22 @@ export class ExoProvider extends MultiModalModel {
   }): Promise<{ message: any }> {
     throw new Error('Document processing is not supported by Exo provider');
   }
+
+  public async research(optionsArg: ResearchOptions): Promise<ResearchResponse> {
+    throw new Error('Research capabilities are not yet supported by Exo provider.');
+  }
+
+  /**
+   * Image generation is not supported by Exo
+   */
+  public async imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse> {
+    throw new Error('Image generation is not supported by Exo. Please use OpenAI provider for image generation.');
+  }
+
+  /**
+   * Image editing is not supported by Exo
+   */
+  public async imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse> {
+    throw new Error('Image editing is not supported by Exo. Please use OpenAI provider for image editing.');
+  }
 }

package/ts/provider.groq.ts
CHANGED
@@ -1,7 +1,16 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
-import type {
+import type {
+  ChatOptions,
+  ChatResponse,
+  ChatMessage,
+  ResearchOptions,
+  ResearchResponse,
+  ImageGenerateOptions,
+  ImageEditOptions,
+  ImageResponse
+} from './abstract.classes.multimodal.js';
 
 export interface IGroqProviderOptions {
   groqToken: string;

@@ -189,4 +198,22 @@ export class GroqProvider extends MultiModalModel {
   }): Promise<{ message: any }> {
     throw new Error('Document processing is not yet supported by Groq.');
   }
+
+  public async research(optionsArg: ResearchOptions): Promise<ResearchResponse> {
+    throw new Error('Research capabilities are not yet supported by Groq provider.');
+  }
+
+  /**
+   * Image generation is not supported by Groq
+   */
+  public async imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse> {
+    throw new Error('Image generation is not supported by Groq. Please use OpenAI provider for image generation.');
+  }
+
+  /**
+   * Image editing is not supported by Groq
+   */
+  public async imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse> {
+    throw new Error('Image editing is not supported by Groq. Please use OpenAI provider for image editing.');
+  }
 }

package/ts/provider.ollama.ts
CHANGED
@@ -1,7 +1,16 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
-import type {
+import type {
+  ChatOptions,
+  ChatResponse,
+  ChatMessage,
+  ResearchOptions,
+  ResearchResponse,
+  ImageGenerateOptions,
+  ImageEditOptions,
+  ImageResponse
+} from './abstract.classes.multimodal.js';
 
 export interface IOllamaProviderOptions {
   baseUrl?: string;

@@ -251,4 +260,22 @@ export class OllamaProvider extends MultiModalModel {
       }
     };
   }
+
+  public async research(optionsArg: ResearchOptions): Promise<ResearchResponse> {
+    throw new Error('Research capabilities are not yet supported by Ollama provider.');
+  }
+
+  /**
+   * Image generation is not supported by Ollama
+   */
+  public async imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse> {
+    throw new Error('Image generation is not supported by Ollama. Please use OpenAI provider for image generation.');
+  }
+
+  /**
+   * Image editing is not supported by Ollama
+   */
+  public async imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse> {
+    throw new Error('Image editing is not supported by Ollama. Please use OpenAI provider for image editing.');
+  }
 }

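Because Exo, Groq, and Ollama (and Anthropic for image operations) now throw on the new capability methods rather than degrading silently, callers that mix providers may want to guard those calls. A hypothetical helper, not part of the package:

import type { MultiModalModel, ImageGenerateOptions, ImageResponse } from '@push.rocks/smartai';

// Hypothetical wrapper: returns null instead of throwing when the chosen
// provider does not implement image generation.
async function tryImageGenerate(
  provider: MultiModalModel,
  options: ImageGenerateOptions
): Promise<ImageResponse | null> {
  try {
    return await provider.imageGenerate(options);
  } catch (error) {
    console.warn(`imageGenerate unavailable: ${(error as Error).message}`);
    return null;
  }
}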