@olane/o-intelligence 0.7.12-alpha.9 → 0.7.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/src/anthropic-intelligence.tool.d.ts +11 -3
  2. package/dist/src/anthropic-intelligence.tool.d.ts.map +1 -1
  3. package/dist/src/anthropic-intelligence.tool.js +230 -4
  4. package/dist/src/gemini-intelligence.tool.d.ts +11 -3
  5. package/dist/src/gemini-intelligence.tool.d.ts.map +1 -1
  6. package/dist/src/gemini-intelligence.tool.js +234 -2
  7. package/dist/src/grok-intelligence.tool.d.ts +5 -3
  8. package/dist/src/grok-intelligence.tool.d.ts.map +1 -1
  9. package/dist/src/grok-intelligence.tool.js +207 -2
  10. package/dist/src/interfaces/prompt.request.d.ts +2 -2
  11. package/dist/src/interfaces/prompt.request.d.ts.map +1 -1
  12. package/dist/src/methods/intelligence.methods.d.ts.map +1 -1
  13. package/dist/src/methods/intelligence.methods.js +7 -0
  14. package/dist/src/methods/llm.methods.d.ts.map +1 -1
  15. package/dist/src/methods/llm.methods.js +14 -0
  16. package/dist/src/o-intelligence.tool.d.ts.map +1 -1
  17. package/dist/src/o-intelligence.tool.js +42 -38
  18. package/dist/src/ollama-intelligence.tool.d.ts +11 -3
  19. package/dist/src/ollama-intelligence.tool.d.ts.map +1 -1
  20. package/dist/src/ollama-intelligence.tool.js +188 -2
  21. package/dist/src/openai-intelligence.tool.d.ts +11 -3
  22. package/dist/src/openai-intelligence.tool.d.ts.map +1 -1
  23. package/dist/src/openai-intelligence.tool.js +221 -2
  24. package/dist/src/perplexity-intelligence.tool.d.ts +11 -3
  25. package/dist/src/perplexity-intelligence.tool.d.ts.map +1 -1
  26. package/dist/src/perplexity-intelligence.tool.js +288 -3
  27. package/dist/src/types/streaming.types.d.ts +197 -0
  28. package/dist/src/types/streaming.types.d.ts.map +1 -0
  29. package/dist/src/types/streaming.types.js +4 -0
  30. package/dist/src/utils/sse-parser.d.ts +66 -0
  31. package/dist/src/utils/sse-parser.d.ts.map +1 -0
  32. package/dist/src/utils/sse-parser.js +255 -0
  33. package/dist/src/utils/streaming-helpers.d.ts +16 -0
  34. package/dist/src/utils/streaming-helpers.d.ts.map +1 -0
  35. package/dist/src/utils/streaming-helpers.js +129 -0
  36. package/package.json +7 -7
@@ -1,6 +1,7 @@
1
1
  import { oAddress } from '@olane/o-core';
2
2
  import { LLM_PARAMS } from './methods/llm.methods.js';
3
3
  import { oLaneTool } from '@olane/o-lane';
4
+ import { StreamUtils } from '@olane/o-node';
4
5
  export class GrokIntelligenceTool extends oLaneTool {
5
6
  constructor(config) {
6
7
  super({
@@ -15,8 +16,13 @@ export class GrokIntelligenceTool extends oLaneTool {
15
16
  this.apiKey = process.env.GROK_API_KEY || '';
16
17
  }
17
18
  async _tool_completion(request) {
19
+ const params = request.params;
20
+ const { _isStreaming = false } = params;
21
+ if (_isStreaming) {
22
+ this.logger.debug('Streaming completion...');
23
+ return StreamUtils.processGenerator(request, this._streamCompletion(request), request.stream);
24
+ }
18
25
  try {
19
- const params = request.params;
20
26
  const { model = this.defaultModel, messages, apiKey = this.apiKey, ...options } = params;
21
27
  const key = apiKey || process.env.GROK_API_KEY;
22
28
  if (!key) {
@@ -67,9 +73,109 @@ export class GrokIntelligenceTool extends oLaneTool {
67
73
  };
68
74
  }
69
75
  }
70
- async _tool_generate(request) {
76
+ async *_streamCompletion(request) {
71
77
  try {
72
78
  const params = request.params;
79
+ const { model = this.defaultModel, messages, apiKey = this.apiKey, ...options } = params;
80
+ const key = apiKey || process.env.GROK_API_KEY;
81
+ if (!key) {
82
+ yield { success: false, error: 'Grok API key is required' };
83
+ return;
84
+ }
85
+ if (!messages || !Array.isArray(messages)) {
86
+ yield { success: false, error: '"messages" array is required' };
87
+ return;
88
+ }
89
+ const chatRequest = {
90
+ model: model,
91
+ messages: messages,
92
+ stream: true,
93
+ };
94
+ if (options.max_tokens !== undefined)
95
+ chatRequest.max_tokens = options.max_tokens;
96
+ if (options.temperature !== undefined)
97
+ chatRequest.temperature = options.temperature;
98
+ if (options.top_p !== undefined)
99
+ chatRequest.top_p = options.top_p;
100
+ const response = await fetch(`${this.baseUrl}/chat/completions`, {
101
+ method: 'POST',
102
+ headers: {
103
+ 'Content-Type': 'application/json',
104
+ Authorization: `Bearer ${key}`,
105
+ },
106
+ body: JSON.stringify(chatRequest),
107
+ });
108
+ if (!response.ok) {
109
+ const errorText = await response.text();
110
+ yield {
111
+ success: false,
112
+ error: `Grok API error: ${response.status} - ${errorText}`,
113
+ };
114
+ return;
115
+ }
116
+ if (!response.body) {
117
+ yield { success: false, error: 'Response body is null' };
118
+ return;
119
+ }
120
+ const reader = response.body.getReader();
121
+ const decoder = new TextDecoder();
122
+ let buffer = '';
123
+ while (true) {
124
+ const { done, value } = await reader.read();
125
+ if (done)
126
+ break;
127
+ buffer += decoder.decode(value, { stream: true });
128
+ const lines = buffer.split('\n');
129
+ buffer = lines.pop() || '';
130
+ for (const line of lines) {
131
+ const trimmedLine = line.trim();
132
+ if (!trimmedLine || !trimmedLine.startsWith('data: '))
133
+ continue;
134
+ const data = trimmedLine.slice(6);
135
+ if (data === '[DONE]')
136
+ break;
137
+ try {
138
+ const parsed = JSON.parse(data);
139
+ const choice = parsed.choices?.[0];
140
+ if (choice?.delta?.content) {
141
+ yield {
142
+ delta: choice.delta.content,
143
+ model: parsed.model || model,
144
+ };
145
+ }
146
+ if (choice?.finish_reason) {
147
+ yield {
148
+ finish_reason: choice.finish_reason,
149
+ };
150
+ }
151
+ if (parsed.usage) {
152
+ yield {
153
+ usage: parsed.usage,
154
+ };
155
+ }
156
+ }
157
+ catch (parseError) {
158
+ // Skip invalid JSON
159
+ continue;
160
+ }
161
+ }
162
+ }
163
+ }
164
+ catch (error) {
165
+ yield {
166
+ success: false,
167
+ error: `Failed to stream chat: ${error.message}`,
168
+ };
169
+ }
170
+ }
171
+ async _tool_generate(request) {
172
+ const params = request.params;
173
+ const { _isStreaming = false } = params;
174
+ if (_isStreaming) {
175
+ this.logger.debug('Streaming generate...');
176
+ return StreamUtils.processGenerator(request, this._streamGenerate(request), request.stream);
177
+ }
178
+ try {
73
179
  const { model = this.defaultModel, prompt, system, apiKey = this.apiKey, ...options } = params;
74
180
  const key = apiKey || process.env.GROK_API_KEY;
75
181
  if (!key) {
@@ -124,6 +230,105 @@ export class GrokIntelligenceTool extends oLaneTool {
124
230
  };
125
231
  }
126
232
  }
233
+ async *_streamGenerate(request) {
234
+ try {
235
+ const params = request.params;
236
+ const { model = this.defaultModel, prompt, system, apiKey = this.apiKey, ...options } = params;
237
+ const key = apiKey || process.env.GROK_API_KEY;
238
+ if (!key) {
239
+ yield { success: false, error: 'Grok API key is required' };
240
+ return;
241
+ }
242
+ if (!prompt) {
243
+ yield { success: false, error: 'Prompt is required' };
244
+ return;
245
+ }
246
+ const messages = [];
247
+ if (system)
248
+ messages.push({ role: 'system', content: system });
249
+ messages.push({ role: 'user', content: prompt });
250
+ const chatRequest = {
251
+ model: model,
252
+ messages,
253
+ stream: true,
254
+ };
255
+ if (options.max_tokens !== undefined)
256
+ chatRequest.max_tokens = options.max_tokens;
257
+ if (options.temperature !== undefined)
258
+ chatRequest.temperature = options.temperature;
259
+ if (options.top_p !== undefined)
260
+ chatRequest.top_p = options.top_p;
261
+ const response = await fetch(`${this.baseUrl}/chat/completions`, {
262
+ method: 'POST',
263
+ headers: {
264
+ 'Content-Type': 'application/json',
265
+ Authorization: `Bearer ${key}`,
266
+ },
267
+ body: JSON.stringify(chatRequest),
268
+ });
269
+ if (!response.ok) {
270
+ const errorText = await response.text();
271
+ yield {
272
+ success: false,
273
+ error: `Grok API error: ${response.status} - ${errorText}`,
274
+ };
275
+ return;
276
+ }
277
+ if (!response.body) {
278
+ yield { success: false, error: 'Response body is null' };
279
+ return;
280
+ }
281
+ const reader = response.body.getReader();
282
+ const decoder = new TextDecoder();
283
+ let buffer = '';
284
+ while (true) {
285
+ const { done, value } = await reader.read();
286
+ if (done)
287
+ break;
288
+ buffer += decoder.decode(value, { stream: true });
289
+ const lines = buffer.split('\n');
290
+ buffer = lines.pop() || '';
291
+ for (const line of lines) {
292
+ const trimmedLine = line.trim();
293
+ if (!trimmedLine || !trimmedLine.startsWith('data: '))
294
+ continue;
295
+ const data = trimmedLine.slice(6);
296
+ if (data === '[DONE]')
297
+ break;
298
+ try {
299
+ const parsed = JSON.parse(data);
300
+ const choice = parsed.choices?.[0];
301
+ if (choice?.delta?.content) {
302
+ yield {
303
+ delta: choice.delta.content,
304
+ model: parsed.model || model,
305
+ };
306
+ }
307
+ if (choice?.finish_reason) {
308
+ yield {
309
+ finish_reason: choice.finish_reason,
310
+ };
311
+ }
312
+ if (parsed.usage) {
313
+ yield {
314
+ usage: parsed.usage,
315
+ };
316
+ }
317
+ }
318
+ catch (parseError) {
319
+ // Skip invalid JSON
320
+ continue;
321
+ }
322
+ }
323
+ }
324
+ }
325
+ catch (error) {
326
+ yield {
327
+ success: false,
328
+ error: `Failed to stream generate: ${error.message}`,
329
+ };
330
+ }
331
+ }
127
332
  async _tool_list_models(request) {
128
333
  try {
129
334
  const params = request.params;
@@ -1,6 +1,6 @@
1
- import { oRequest } from '@olane/o-core';
1
+ import { oStreamRequest } from '@olane/o-node';
2
2
  import { RequestParams } from '@olane/o-protocol';
3
- export interface PromptRequest extends oRequest {
3
+ export interface PromptRequest extends oStreamRequest {
4
4
  params: RequestParams & {
5
5
  prompt: string;
6
6
  };
@@ -1 +1 @@
1
- {"version":3,"file":"prompt.request.d.ts","sourceRoot":"","sources":["../../../src/interfaces/prompt.request.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAC;AACzC,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD,MAAM,WAAW,aAAc,SAAQ,QAAQ;IAC7C,MAAM,EAAE,aAAa,GAAG;QACtB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;CACH"}
1
+ {"version":3,"file":"prompt.request.d.ts","sourceRoot":"","sources":["../../../src/interfaces/prompt.request.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AAC/C,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD,MAAM,WAAW,aAAc,SAAQ,cAAc;IACnD,MAAM,EAAE,aAAa,GAAG;QACtB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;CACH"}
@@ -1 +1 @@
1
- {"version":3,"file":"intelligence.methods.d.ts","sourceRoot":"","sources":["../../../src/methods/intelligence.methods.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,eAAO,MAAM,mBAAmB,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CAoCzD,CAAC"}
1
+ {"version":3,"file":"intelligence.methods.d.ts","sourceRoot":"","sources":["../../../src/methods/intelligence.methods.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,eAAO,MAAM,mBAAmB,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CA2CzD,CAAC"}
@@ -31,6 +31,13 @@ export const INTELLIGENCE_PARAMS = {
31
31
  value: 'string',
32
32
  description: 'The prompt to send to the AI model',
33
33
  },
34
+ {
35
+ name: 'stream',
36
+ type: 'boolean',
37
+ value: 'boolean',
38
+ description: 'Whether to stream the response',
39
+ required: false,
40
+ },
34
41
  ],
35
42
  },
36
43
  };
@@ -1 +1 @@
1
- {"version":3,"file":"llm.methods.d.ts","sourceRoot":"","sources":["../../../src/methods/llm.methods.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,eAAO,MAAM,UAAU,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CAwKhD,CAAC"}
1
+ {"version":3,"file":"llm.methods.d.ts","sourceRoot":"","sources":["../../../src/methods/llm.methods.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,eAAO,MAAM,UAAU,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CAsLhD,CAAC"}
@@ -24,6 +24,13 @@ export const LLM_PARAMS = {
24
24
  description: 'The options to use for generation',
25
25
  required: false,
26
26
  },
27
+ {
28
+ name: 'stream',
29
+ type: 'boolean',
30
+ value: 'boolean',
31
+ description: 'Whether to stream the response',
32
+ required: false,
33
+ },
27
34
  ],
28
35
  },
29
36
  generate: {
@@ -37,6 +44,13 @@ export const LLM_PARAMS = {
37
44
  value: 'string',
38
45
  description: 'The model to use for generation',
39
46
  },
47
+ {
48
+ name: 'stream',
49
+ type: 'boolean',
50
+ value: 'boolean',
51
+ description: 'Whether to stream the response',
52
+ required: false,
53
+ },
40
54
  ],
41
55
  },
42
56
  list_models: {
@@ -1 +1 @@
1
- {"version":3,"file":"o-intelligence.tool.d.ts","sourceRoot":"","sources":["../../src/o-intelligence.tool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAa,MAAM,eAAe,CAAC;AACpD,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAQ3C,OAAO,EAAE,YAAY,EAAE,MAAM,+BAA+B,CAAC;AAC7D,OAAO,EAAE,gBAAgB,EAAE,MAAM,mCAAmC,CAAC;AAGrE,OAAO,EAAE,aAAa,EAAE,MAAM,gCAAgC,CAAC;AAC/D,OAAO,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAC1C,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAE7D,qBAAa,gBAAiB,SAAQ,SAAS;IAC7C,OAAO,CAAC,eAAe,CAAK;gBAChB,MAAM,EAAE,eAAe;IAuB7B,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IAmBnD,gBAAgB,IAAI,OAAO,CAAC;QAAE,QAAQ,EAAE,YAAY,CAAA;KAAE,CAAC;IA6DvD,iBAAiB,CAAC,QAAQ,EAAE,YAAY,GAAG,OAAO,CAAC;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,CAAC;IAsEtE,kBAAkB,CAAC,OAAO,EAAE,aAAa,GAAG,OAAO,CAAC;QACxD,MAAM,EAAE,QAAQ,CAAC;QACjB,MAAM,EAAE,MAAM,CAAC;QACf,OAAO,EAAE,GAAG,CAAC;KACd,CAAC;IAWI,eAAe,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,UAAU,CAAC;IA6C/D,YAAY,CAAC,OAAO,EAAE,aAAa,GAAG,OAAO,CAAC,UAAU,CAAC;IAqBzD,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;CAwClC"}
1
+ {"version":3,"file":"o-intelligence.tool.d.ts","sourceRoot":"","sources":["../../src/o-intelligence.tool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAa,QAAQ,EAAa,MAAM,eAAe,CAAC;AAC/D,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAQ3C,OAAO,EAAE,YAAY,EAAE,MAAM,+BAA+B,CAAC;AAC7D,OAAO,EAAE,gBAAgB,EAAE,MAAM,mCAAmC,CAAC;AAGrE,OAAO,EAAE,aAAa,EAAE,MAAM,gCAAgC,CAAC;AAC/D,OAAO,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAC1C,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAE7D,qBAAa,gBAAiB,SAAQ,SAAS;IAC7C,OAAO,CAAC,eAAe,CAAK;gBAChB,MAAM,EAAE,eAAe;IAuB7B,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IAmBnD,gBAAgB,IAAI,OAAO,CAAC;QAAE,QAAQ,EAAE,YAAY,CAAA;KAAE,CAAC;IA6DvD,iBAAiB,CAAC,QAAQ,EAAE,YAAY,GAAG,OAAO,CAAC;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,CAAC;IAsEtE,kBAAkB,CAAC,OAAO,EAAE,aAAa,GAAG,OAAO,CAAC;QACxD,MAAM,EAAE,QAAQ,CAAC;QACjB,MAAM,EAAE,MAAM,CAAC;QACf,OAAO,EAAE,GAAG,CAAC;KACd,CAAC;IAWI,eAAe,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,UAAU,CAAC;IA6C/D,YAAY,CAAC,OAAO,EAAE,aAAa,GAAG,OAAO,CAAC,UAAU,CAAC;IA+BzD,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;CAuClC"}
@@ -1,4 +1,4 @@
1
- import { oAddress } from '@olane/o-core';
1
+ import { CoreUtils, oAddress, oResponse } from '@olane/o-core';
2
2
  import { AnthropicIntelligenceTool } from './anthropic-intelligence.tool.js';
3
3
  import { OpenAIIntelligenceTool } from './openai-intelligence.tool.js';
4
4
  import { OllamaIntelligenceTool } from './ollama-intelligence.tool.js';
@@ -216,13 +216,14 @@ export class IntelligenceTool extends oLaneTool {
216
216
  }
217
217
  // we cannot wrap this tool use in a plan because it is a core dependency in all planning
218
218
  async _tool_prompt(request) {
219
- const { prompt } = request.params;
219
+ const { prompt, _isStreaming = false } = request.params;
220
+ const stream = request.stream;
220
221
  const intelligence = await this.chooseIntelligence(request);
221
- this.logger.debug('Using intelligence: ', intelligence.choice.toString());
222
222
  const child = this.hierarchyManager.getChild(intelligence.choice);
223
223
  const response = await this.useChild(child || intelligence.choice, {
224
224
  method: 'completion',
225
225
  params: {
226
+ _isStreaming: _isStreaming,
226
227
  apiKey: intelligence.apiKey,
227
228
  messages: [
228
229
  {
@@ -231,46 +232,49 @@ export class IntelligenceTool extends oLaneTool {
231
232
  },
232
233
  ],
233
234
  },
235
+ }, {
236
+ isStream: _isStreaming || false,
237
+ onChunk: async (chunk) => {
238
+ await CoreUtils.sendStreamResponse(oResponse.fromJSON(chunk), stream);
239
+ },
234
240
  });
235
241
  return response.result.data;
236
242
  }
237
243
  async initialize() {
238
244
  await super.initialize();
239
245
  const config = this.config;
240
- const anthropicTool = new AnthropicIntelligenceTool({
241
- ...config,
242
- parent: this.address,
243
- leader: this.leader,
244
- });
245
- await anthropicTool.start();
246
- this.addChildNode(anthropicTool);
247
- const openaiTool = new OpenAIIntelligenceTool({
248
- ...config,
249
- parent: this.address,
250
- leader: this.leader,
251
- });
252
- await openaiTool.start();
253
- this.addChildNode(openaiTool);
254
- const ollamaTool = new OllamaIntelligenceTool({
255
- ...config,
256
- parent: this.address,
257
- leader: this.leader,
258
- });
259
- await ollamaTool.start();
260
- this.addChildNode(ollamaTool);
261
- const perplexityTool = new PerplexityIntelligenceTool({
262
- ...config,
263
- parent: this.address,
264
- leader: this.leader,
265
- });
266
- await perplexityTool.start();
267
- this.addChildNode(perplexityTool);
268
- const grokTool = new GrokIntelligenceTool({
269
- ...config,
270
- parent: this.address,
271
- leader: this.leader,
272
- });
273
- await grokTool.start();
274
- this.addChildNode(grokTool);
246
+ const tools = [
247
+ new AnthropicIntelligenceTool({
248
+ ...config,
249
+ parent: this.address,
250
+ leader: this.leader,
251
+ }),
252
+ new OpenAIIntelligenceTool({
253
+ ...config,
254
+ parent: this.address,
255
+ leader: this.leader,
256
+ }),
257
+ new OllamaIntelligenceTool({
258
+ ...config,
259
+ parent: this.address,
260
+ leader: this.leader,
261
+ }),
262
+ new PerplexityIntelligenceTool({
263
+ ...config,
264
+ parent: this.address,
265
+ leader: this.leader,
266
+ }),
267
+ new GrokIntelligenceTool({
268
+ ...config,
269
+ parent: this.address,
270
+ leader: this.leader,
271
+ }),
272
+ ];
273
+ for (const tool of tools) {
274
+ tool.hookInitializeFinished = () => {
275
+ this.addChildNode(tool);
276
+ };
277
+ await tool.start();
278
+ }
275
279
  }
276
280
  }
@@ -1,7 +1,7 @@
1
1
  import { oRequest } from '@olane/o-core';
2
2
  import { ToolResult } from '@olane/o-tool';
3
3
  import { oLaneTool } from '@olane/o-lane';
4
- import { oNodeToolConfig } from '@olane/o-node';
4
+ import { oNodeToolConfig, oStreamRequest } from '@olane/o-node';
5
5
  export declare class OllamaIntelligenceTool extends oLaneTool {
6
6
  static defaultModel: string;
7
7
  static defaultUrl: string;
@@ -9,11 +9,19 @@ export declare class OllamaIntelligenceTool extends oLaneTool {
9
9
  /**
10
10
  * Chat completion with Ollama
11
11
  */
12
- _tool_completion(request: oRequest): Promise<ToolResult>;
12
+ _tool_completion(request: oStreamRequest): Promise<ToolResult>;
13
+ /**
14
+ * Stream chat completion with Ollama
15
+ */
16
+ private _streamCompletion;
13
17
  /**
14
18
  * Generate text with Ollama
15
19
  */
16
- _tool_generate(request: oRequest): Promise<ToolResult>;
20
+ _tool_generate(request: oStreamRequest): Promise<ToolResult>;
21
+ /**
22
+ * Stream text generation with Ollama
23
+ */
24
+ private _streamGenerate;
17
25
  /**
18
26
  * List available models
19
27
  */
@@ -1 +1 @@
1
- {"version":3,"file":"ollama-intelligence.tool.d.ts","sourceRoot":"","sources":["../../src/ollama-intelligence.tool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAY,QAAQ,EAAE,MAAM,eAAe,CAAC;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,OAAO,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAC1C,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AA8GhD,qBAAa,sBAAuB,SAAQ,SAAS;IACnD,MAAM,CAAC,YAAY,SAAqB;IACxC,MAAM,CAAC,UAAU,SAA4B;gBAEjC,MAAM,EAAE,eAAe;IAYnC;;OAEG;IACG,gBAAgB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IA4D9D;;OAEG;IACG,cAAc,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IAgE5D;;OAEG;IACG,iBAAiB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IAmC/D;;OAEG;IACG,gBAAgB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IAiE9D;;OAEG;IACG,kBAAkB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IA6ChE;;OAEG;IACG,gBAAgB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IAwC9D;;OAEG;IACG,YAAY,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;CAyB3D"}
1
+ {"version":3,"file":"ollama-intelligence.tool.d.ts","sourceRoot":"","sources":["../../src/ollama-intelligence.tool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAY,QAAQ,EAAE,MAAM,eAAe,CAAC;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,OAAO,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAC1C,OAAO,EAAE,eAAe,EAAE,cAAc,EAAe,MAAM,eAAe,CAAC;AA+G7E,qBAAa,sBAAuB,SAAQ,SAAS;IACnD,MAAM,CAAC,YAAY,SAAqB;IACxC,MAAM,CAAC,UAAU,SAA4B;gBAEjC,MAAM,EAAE,eAAe;IAYnC;;OAEG;IACG,gBAAgB,CAAC,OAAO,EAAE,cAAc,GAAG,OAAO,CAAC,UAAU,CAAC;IAsEpE;;OAEG;YACY,iBAAiB;IAuGhC;;OAEG;IACG,cAAc,CAAC,OAAO,EAAE,cAAc,GAAG,OAAO,CAAC,UAAU,CAAC;IA0ElE;;OAEG;YACY,eAAe;IAyG9B;;OAEG;IACG,iBAAiB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IAmC/D;;OAEG;IACG,gBAAgB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IAiE9D;;OAEG;IACG,kBAAkB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IA6ChE;;OAEG;IACG,gBAAgB,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;IAwC9D;;OAEG;IACG,YAAY,CAAC,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,UAAU,CAAC;CAyB3D"}