@push.rocks/smartai 0.13.3 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. package/dist_ts/00_commitinfo_data.js +3 -3
  2. package/dist_ts/index.d.ts +6 -11
  3. package/dist_ts/index.js +6 -12
  4. package/dist_ts/plugins.d.ts +10 -15
  5. package/dist_ts/plugins.js +13 -19
  6. package/dist_ts/smartai.classes.smartai.d.ts +7 -0
  7. package/dist_ts/smartai.classes.smartai.js +51 -0
  8. package/dist_ts/smartai.interfaces.d.ts +41 -0
  9. package/dist_ts/smartai.interfaces.js +2 -0
  10. package/dist_ts/smartai.middleware.anthropic.d.ts +7 -0
  11. package/dist_ts/smartai.middleware.anthropic.js +36 -0
  12. package/dist_ts/smartai.provider.ollama.d.ts +8 -0
  13. package/dist_ts/smartai.provider.ollama.js +378 -0
  14. package/dist_ts_audio/index.d.ts +9 -0
  15. package/dist_ts_audio/index.js +15 -0
  16. package/dist_ts_audio/plugins.d.ts +2 -0
  17. package/dist_ts_audio/plugins.js +3 -0
  18. package/dist_ts_document/index.d.ts +11 -0
  19. package/dist_ts_document/index.js +45 -0
  20. package/dist_ts_document/plugins.d.ts +3 -0
  21. package/dist_ts_document/plugins.js +4 -0
  22. package/dist_ts_image/index.d.ts +46 -0
  23. package/dist_ts_image/index.js +110 -0
  24. package/dist_ts_image/plugins.d.ts +3 -0
  25. package/dist_ts_image/plugins.js +4 -0
  26. package/dist_ts_research/index.d.ts +19 -0
  27. package/dist_ts_research/index.js +98 -0
  28. package/dist_ts_research/plugins.d.ts +2 -0
  29. package/dist_ts_research/plugins.js +3 -0
  30. package/dist_ts_vision/index.d.ts +8 -0
  31. package/dist_ts_vision/index.js +21 -0
  32. package/dist_ts_vision/plugins.d.ts +2 -0
  33. package/dist_ts_vision/plugins.js +3 -0
  34. package/package.json +50 -22
  35. package/readme.hints.md +34 -88
  36. package/readme.md +284 -547
  37. package/ts/00_commitinfo_data.ts +2 -2
  38. package/ts/index.ts +8 -11
  39. package/ts/plugins.ts +19 -35
  40. package/ts/smartai.classes.smartai.ts +51 -0
  41. package/ts/smartai.interfaces.ts +53 -0
  42. package/ts/smartai.middleware.anthropic.ts +38 -0
  43. package/ts/smartai.provider.ollama.ts +426 -0
  44. package/ts_audio/index.ts +24 -0
  45. package/ts_audio/plugins.ts +2 -0
  46. package/ts_document/index.ts +61 -0
  47. package/ts_document/plugins.ts +3 -0
  48. package/ts_image/index.ts +147 -0
  49. package/ts_image/plugins.ts +3 -0
  50. package/ts_research/index.ts +120 -0
  51. package/ts_research/plugins.ts +2 -0
  52. package/ts_vision/index.ts +29 -0
  53. package/ts_vision/plugins.ts +2 -0
  54. package/dist_ts/abstract.classes.multimodal.d.ts +0 -212
  55. package/dist_ts/abstract.classes.multimodal.js +0 -43
  56. package/dist_ts/classes.conversation.d.ts +0 -31
  57. package/dist_ts/classes.conversation.js +0 -150
  58. package/dist_ts/classes.smartai.d.ts +0 -59
  59. package/dist_ts/classes.smartai.js +0 -139
  60. package/dist_ts/classes.tts.d.ts +0 -6
  61. package/dist_ts/classes.tts.js +0 -10
  62. package/dist_ts/interfaces.d.ts +0 -1
  63. package/dist_ts/interfaces.js +0 -2
  64. package/dist_ts/paths.d.ts +0 -2
  65. package/dist_ts/paths.js +0 -4
  66. package/dist_ts/provider.anthropic.d.ts +0 -48
  67. package/dist_ts/provider.anthropic.js +0 -369
  68. package/dist_ts/provider.elevenlabs.d.ts +0 -43
  69. package/dist_ts/provider.elevenlabs.js +0 -64
  70. package/dist_ts/provider.exo.d.ts +0 -40
  71. package/dist_ts/provider.exo.js +0 -116
  72. package/dist_ts/provider.groq.d.ts +0 -39
  73. package/dist_ts/provider.groq.js +0 -178
  74. package/dist_ts/provider.mistral.d.ts +0 -61
  75. package/dist_ts/provider.mistral.js +0 -288
  76. package/dist_ts/provider.ollama.d.ts +0 -141
  77. package/dist_ts/provider.ollama.js +0 -529
  78. package/dist_ts/provider.openai.d.ts +0 -62
  79. package/dist_ts/provider.openai.js +0 -403
  80. package/dist_ts/provider.perplexity.d.ts +0 -37
  81. package/dist_ts/provider.perplexity.js +0 -215
  82. package/dist_ts/provider.xai.d.ts +0 -52
  83. package/dist_ts/provider.xai.js +0 -160
  84. package/ts/abstract.classes.multimodal.ts +0 -240
  85. package/ts/classes.conversation.ts +0 -176
  86. package/ts/classes.smartai.ts +0 -187
  87. package/ts/classes.tts.ts +0 -15
  88. package/ts/interfaces.ts +0 -0
  89. package/ts/paths.ts +0 -4
  90. package/ts/provider.anthropic.ts +0 -446
  91. package/ts/provider.elevenlabs.ts +0 -116
  92. package/ts/provider.exo.ts +0 -155
  93. package/ts/provider.groq.ts +0 -219
  94. package/ts/provider.mistral.ts +0 -352
  95. package/ts/provider.ollama.ts +0 -705
  96. package/ts/provider.openai.ts +0 -462
  97. package/ts/provider.perplexity.ts +0 -259
  98. package/ts/provider.xai.ts +0 -214
@@ -1,141 +0,0 @@
- import { MultiModalModel } from './abstract.classes.multimodal.js';
- import type { ChatOptions, ChatResponse, ChatMessage, ResearchOptions, ResearchResponse, ImageGenerateOptions, ImageEditOptions, ImageResponse, StreamingChatOptions } from './abstract.classes.multimodal.js';
- /**
- * Ollama model runtime options
- * @see https://github.com/ollama/ollama/blob/main/docs/modelfile.md
- */
- export interface IOllamaModelOptions {
- num_ctx?: number;
- temperature?: number;
- top_k?: number;
- top_p?: number;
- repeat_penalty?: number;
- num_predict?: number;
- stop?: string[];
- seed?: number;
- think?: boolean;
- }
- /**
- * JSON Schema tool definition for Ollama native tool calling
- * @see https://docs.ollama.com/capabilities/tool-calling
- */
- export interface IOllamaTool {
- type: 'function';
- function: {
- name: string;
- description: string;
- parameters: {
- type: 'object';
- properties: Record<string, {
- type: string;
- description?: string;
- enum?: string[];
- }>;
- required?: string[];
- };
- };
- }
- /**
- * Tool call returned by model in native tool calling mode
- */
- export interface IOllamaToolCall {
- function: {
- name: string;
- arguments: Record<string, unknown>;
- index?: number;
- };
- }
- export interface IOllamaProviderOptions {
- baseUrl?: string;
- model?: string;
- visionModel?: string;
- defaultOptions?: IOllamaModelOptions;
- defaultTimeout?: number;
- }
- /**
- * Extended chat options with Ollama-specific settings
- */
- export interface IOllamaChatOptions extends ChatOptions {
- options?: IOllamaModelOptions;
- timeout?: number;
- model?: string;
- tools?: IOllamaTool[];
- }
- /**
- * Chunk emitted during streaming
- */
- export interface IOllamaStreamChunk {
- content: string;
- thinking?: string;
- toolCalls?: IOllamaToolCall[];
- done: boolean;
- stats?: {
- totalDuration?: number;
- evalCount?: number;
- };
- }
- /**
- * Extended chat response with Ollama-specific fields
- */
- export interface IOllamaChatResponse extends ChatResponse {
- thinking?: string;
- toolCalls?: IOllamaToolCall[];
- stats?: {
- totalDuration?: number;
- evalCount?: number;
- };
- }
- export declare class OllamaProvider extends MultiModalModel {
- private options;
- private baseUrl;
- private model;
- private visionModel;
- private defaultOptions;
- private defaultTimeout;
- constructor(optionsArg?: IOllamaProviderOptions);
- start(): Promise<void>;
- stop(): Promise<void>;
- chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>>;
- chat(optionsArg: ChatOptions): Promise<ChatResponse>;
- /**
- * Streaming chat with token callback (implements MultiModalModel interface)
- * Calls onToken for each token generated during the response
- */
- chatStreaming(optionsArg: StreamingChatOptions): Promise<ChatResponse>;
- /**
- * Streaming chat with async iteration and options support
- */
- chatStreamResponse(optionsArg: IOllamaChatOptions): Promise<AsyncIterable<IOllamaStreamChunk>>;
- /**
- * Stream and collect full response with optional progress callback
- */
- collectStreamResponse(optionsArg: IOllamaChatOptions, onChunk?: (chunk: IOllamaStreamChunk) => void): Promise<IOllamaChatResponse>;
- /**
- * Non-streaming chat with full options support
- */
- chatWithOptions(optionsArg: IOllamaChatOptions): Promise<IOllamaChatResponse>;
- audio(optionsArg: {
- message: string;
- }): Promise<NodeJS.ReadableStream>;
- vision(optionsArg: {
- image: Buffer;
- prompt: string;
- }): Promise<string>;
- document(optionsArg: {
- systemMessage: string;
- userMessage: string;
- pdfDocuments: Uint8Array[];
- messageHistory: ChatMessage[];
- }): Promise<{
- message: any;
- }>;
- research(optionsArg: ResearchOptions): Promise<ResearchResponse>;
- /**
- * Image generation is not supported by Ollama
- */
- imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse>;
- /**
- * Image editing is not supported by Ollama
- */
- imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse>;
- }