openplanter 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126) hide show
  1. package/README.md +210 -0
  2. package/dist/builder.d.ts +11 -0
  3. package/dist/builder.d.ts.map +1 -0
  4. package/dist/builder.js +179 -0
  5. package/dist/builder.js.map +1 -0
  6. package/dist/cli.d.ts +9 -0
  7. package/dist/cli.d.ts.map +1 -0
  8. package/dist/cli.js +548 -0
  9. package/dist/cli.js.map +1 -0
  10. package/dist/config.d.ts +51 -0
  11. package/dist/config.d.ts.map +1 -0
  12. package/dist/config.js +114 -0
  13. package/dist/config.js.map +1 -0
  14. package/dist/credentials.d.ts +52 -0
  15. package/dist/credentials.d.ts.map +1 -0
  16. package/dist/credentials.js +371 -0
  17. package/dist/credentials.js.map +1 -0
  18. package/dist/demo.d.ts +26 -0
  19. package/dist/demo.d.ts.map +1 -0
  20. package/dist/demo.js +95 -0
  21. package/dist/demo.js.map +1 -0
  22. package/dist/engine.d.ts +91 -0
  23. package/dist/engine.d.ts.map +1 -0
  24. package/dist/engine.js +1036 -0
  25. package/dist/engine.js.map +1 -0
  26. package/dist/index.d.ts +30 -0
  27. package/dist/index.d.ts.map +1 -0
  28. package/dist/index.js +39 -0
  29. package/dist/index.js.map +1 -0
  30. package/dist/investigation-tools/aph-holdings.d.ts +61 -0
  31. package/dist/investigation-tools/aph-holdings.d.ts.map +1 -0
  32. package/dist/investigation-tools/aph-holdings.js +459 -0
  33. package/dist/investigation-tools/aph-holdings.js.map +1 -0
  34. package/dist/investigation-tools/asic-officer-lookup.d.ts +42 -0
  35. package/dist/investigation-tools/asic-officer-lookup.d.ts.map +1 -0
  36. package/dist/investigation-tools/asic-officer-lookup.js +197 -0
  37. package/dist/investigation-tools/asic-officer-lookup.js.map +1 -0
  38. package/dist/investigation-tools/asx-calendar-fetcher.d.ts +42 -0
  39. package/dist/investigation-tools/asx-calendar-fetcher.d.ts.map +1 -0
  40. package/dist/investigation-tools/asx-calendar-fetcher.js +271 -0
  41. package/dist/investigation-tools/asx-calendar-fetcher.js.map +1 -0
  42. package/dist/investigation-tools/asx-parser.d.ts +66 -0
  43. package/dist/investigation-tools/asx-parser.d.ts.map +1 -0
  44. package/dist/investigation-tools/asx-parser.js +314 -0
  45. package/dist/investigation-tools/asx-parser.js.map +1 -0
  46. package/dist/investigation-tools/bulk-asx-announcements.d.ts +53 -0
  47. package/dist/investigation-tools/bulk-asx-announcements.d.ts.map +1 -0
  48. package/dist/investigation-tools/bulk-asx-announcements.js +204 -0
  49. package/dist/investigation-tools/bulk-asx-announcements.js.map +1 -0
  50. package/dist/investigation-tools/entity-resolver.d.ts +77 -0
  51. package/dist/investigation-tools/entity-resolver.d.ts.map +1 -0
  52. package/dist/investigation-tools/entity-resolver.js +346 -0
  53. package/dist/investigation-tools/entity-resolver.js.map +1 -0
  54. package/dist/investigation-tools/hotcopper-scraper.d.ts +73 -0
  55. package/dist/investigation-tools/hotcopper-scraper.d.ts.map +1 -0
  56. package/dist/investigation-tools/hotcopper-scraper.js +318 -0
  57. package/dist/investigation-tools/hotcopper-scraper.js.map +1 -0
  58. package/dist/investigation-tools/index.d.ts +15 -0
  59. package/dist/investigation-tools/index.d.ts.map +1 -0
  60. package/dist/investigation-tools/index.js +15 -0
  61. package/dist/investigation-tools/index.js.map +1 -0
  62. package/dist/investigation-tools/insider-graph.d.ts +173 -0
  63. package/dist/investigation-tools/insider-graph.d.ts.map +1 -0
  64. package/dist/investigation-tools/insider-graph.js +732 -0
  65. package/dist/investigation-tools/insider-graph.js.map +1 -0
  66. package/dist/investigation-tools/insider-suspicion-scorer.d.ts +97 -0
  67. package/dist/investigation-tools/insider-suspicion-scorer.d.ts.map +1 -0
  68. package/dist/investigation-tools/insider-suspicion-scorer.js +327 -0
  69. package/dist/investigation-tools/insider-suspicion-scorer.js.map +1 -0
  70. package/dist/investigation-tools/multi-forum-scraper.d.ts +104 -0
  71. package/dist/investigation-tools/multi-forum-scraper.d.ts.map +1 -0
  72. package/dist/investigation-tools/multi-forum-scraper.js +415 -0
  73. package/dist/investigation-tools/multi-forum-scraper.js.map +1 -0
  74. package/dist/investigation-tools/price-fetcher.d.ts +81 -0
  75. package/dist/investigation-tools/price-fetcher.d.ts.map +1 -0
  76. package/dist/investigation-tools/price-fetcher.js +268 -0
  77. package/dist/investigation-tools/price-fetcher.js.map +1 -0
  78. package/dist/investigation-tools/shared.d.ts +39 -0
  79. package/dist/investigation-tools/shared.d.ts.map +1 -0
  80. package/dist/investigation-tools/shared.js +203 -0
  81. package/dist/investigation-tools/shared.js.map +1 -0
  82. package/dist/investigation-tools/timeline-linker.d.ts +90 -0
  83. package/dist/investigation-tools/timeline-linker.d.ts.map +1 -0
  84. package/dist/investigation-tools/timeline-linker.js +219 -0
  85. package/dist/investigation-tools/timeline-linker.js.map +1 -0
  86. package/dist/investigation-tools/volume-scanner.d.ts +70 -0
  87. package/dist/investigation-tools/volume-scanner.d.ts.map +1 -0
  88. package/dist/investigation-tools/volume-scanner.js +227 -0
  89. package/dist/investigation-tools/volume-scanner.js.map +1 -0
  90. package/dist/model.d.ts +136 -0
  91. package/dist/model.d.ts.map +1 -0
  92. package/dist/model.js +1071 -0
  93. package/dist/model.js.map +1 -0
  94. package/dist/patching.d.ts +45 -0
  95. package/dist/patching.d.ts.map +1 -0
  96. package/dist/patching.js +317 -0
  97. package/dist/patching.js.map +1 -0
  98. package/dist/prompts.d.ts +15 -0
  99. package/dist/prompts.d.ts.map +1 -0
  100. package/dist/prompts.js +351 -0
  101. package/dist/prompts.js.map +1 -0
  102. package/dist/replay-log.d.ts +54 -0
  103. package/dist/replay-log.d.ts.map +1 -0
  104. package/dist/replay-log.js +94 -0
  105. package/dist/replay-log.js.map +1 -0
  106. package/dist/runtime.d.ts +53 -0
  107. package/dist/runtime.d.ts.map +1 -0
  108. package/dist/runtime.js +259 -0
  109. package/dist/runtime.js.map +1 -0
  110. package/dist/settings.d.ts +39 -0
  111. package/dist/settings.d.ts.map +1 -0
  112. package/dist/settings.js +146 -0
  113. package/dist/settings.js.map +1 -0
  114. package/dist/tool-defs.d.ts +58 -0
  115. package/dist/tool-defs.d.ts.map +1 -0
  116. package/dist/tool-defs.js +1029 -0
  117. package/dist/tool-defs.js.map +1 -0
  118. package/dist/tools.d.ts +72 -0
  119. package/dist/tools.d.ts.map +1 -0
  120. package/dist/tools.js +1454 -0
  121. package/dist/tools.js.map +1 -0
  122. package/dist/tui.d.ts +49 -0
  123. package/dist/tui.d.ts.map +1 -0
  124. package/dist/tui.js +699 -0
  125. package/dist/tui.js.map +1 -0
  126. package/package.json +126 -0
package/dist/model.js ADDED
@@ -0,0 +1,1071 @@
1
+ import * as https from 'node:https';
2
+ import * as http from 'node:http';
3
+ import { URL } from 'node:url';
4
+ import { TOOL_DEFINITIONS, to_openai_tools, to_anthropic_tools, } from './tool-defs.js';
5
+ // Re-export tool helpers so consumers can reach them via the model module.
6
+ export { TOOL_DEFINITIONS, to_openai_tools, to_anthropic_tools };
7
+ // ---------------------------------------------------------------------------
8
+ // Error type
9
+ // ---------------------------------------------------------------------------
10
/**
 * Error type thrown for all model-layer failures: HTTP errors, timeouts,
 * connection problems, malformed responses, and SSE stream errors.
 */
export class ModelError extends Error {
    /**
     * @param {string} message - Human-readable description of the failure.
     * @param {{ cause?: unknown }} [options] - Optional standard `Error`
     *   options; pass `{ cause }` to preserve the underlying error for
     *   debugging. Omitting it keeps the original one-argument behavior.
     */
    constructor(message, options) {
        super(message, options);
        this.name = 'ModelError';
    }
}
16
// Default User-Agent header attached to every outgoing HTTP request in this
// module (both the fetch-based JSON helper and the raw SSE streamer).
const _DEFAULT_USER_AGENT = 'OpenPlanter/0.1';
17
/**
 * Opaque conversation state owned by the model layer.
 *
 * Holds the provider-native message list plus bookkeeping (system prompt,
 * turn counter, stop sequences). Callers should treat the internals as
 * private and read messages via `getMessages()`.
 */
export class Conversation {
    _provider_messages;
    system_prompt;
    turn_count;
    stop_sequences;
    /**
     * @param {object} [opts] - Optional initial state; any missing or
     *   nullish field falls back to its empty default.
     */
    constructor(opts) {
        const init = opts ?? {};
        this._provider_messages = init._provider_messages ?? [];
        this.system_prompt = init.system_prompt ?? '';
        this.turn_count = init.turn_count ?? 0;
        this.stop_sequences = init.stop_sequences ?? [];
    }
    /**
     * Return a shallow copy of the provider messages list so callers cannot
     * mutate the internal array.
     */
    getMessages() {
        return this._provider_messages.slice();
    }
}
34
+ // ---------------------------------------------------------------------------
35
+ // Shared helpers
36
+ // ---------------------------------------------------------------------------
37
/**
 * Normalize a provider message `content` field into plain text.
 *
 * Accepts either a bare string or an array of content-part objects (the
 * OpenAI/Anthropic block format). Object parts contribute their `text`
 * property when it is a string; all other parts (non-objects, null, parts
 * without a string `text`) are ignored. Text parts from an array are joined
 * with newlines. Any other input yields ''.
 *
 * Fix: the original also special-cased `part.type === 'text'`, but that
 * branch re-read the same `text` property already rejected above it, so it
 * could never push anything — removed as dead code (behavior unchanged).
 *
 * @param {unknown} content - Raw `content` value from a provider message.
 * @returns {string} Extracted text, or '' when nothing is extractable.
 */
export function _extractContent(content) {
    if (typeof content === 'string') {
        return content;
    }
    if (Array.isArray(content)) {
        const textParts = [];
        for (const part of content) {
            if (typeof part !== 'object' || part === null) {
                continue;
            }
            const maybeText = part['text'];
            if (typeof maybeText === 'string') {
                textParts.push(maybeText);
            }
        }
        return textParts.join('\n');
    }
    return '';
}
63
+ // ---------------------------------------------------------------------------
64
+ // HTTP JSON helper (uses native fetch)
65
+ // ---------------------------------------------------------------------------
66
/**
 * Perform an HTTP request with native `fetch` and return the parsed JSON
 * object response.
 *
 * `payload` is JSON-encoded when non-null; requests are aborted via
 * `AbortController` after `timeoutSec` seconds.
 *
 * Fixes vs. the previous version:
 *  - the abort timer now also covers reading the response body (it was
 *    previously cleared in `finally` as soon as headers arrived, leaving
 *    the body read unbounded and outside the error wrapping);
 *  - the redundant `clearTimeout` in the catch branch (duplicated by
 *    `finally`) was removed.
 *
 * @param {string} url - Target URL.
 * @param {string} method - HTTP method ('GET', 'POST', ...).
 * @param {Record<string,string>} headers - Extra headers; merged over the
 *   default User-Agent (caller headers win).
 * @param {unknown} payload - Body to JSON-encode, or null/undefined for none.
 * @param {number} [timeoutSec=90] - Overall timeout in seconds.
 * @returns {Promise<object>} The parsed JSON body (plain object).
 * @throws {ModelError} on timeout, connection failure, non-2xx status,
 *   non-JSON body, or a JSON body that is not a plain object.
 */
async function _httpJson(url, method, headers, payload, timeoutSec = 90) {
    const merged = { 'User-Agent': _DEFAULT_USER_AGENT, ...headers };
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutSec * 1000);
    let resp;
    let raw;
    try {
        resp = await fetch(url, {
            method,
            headers: merged,
            body: payload != null ? JSON.stringify(payload) : undefined,
            signal: controller.signal,
        });
        // Read the body while the abort timer is still armed so a slow body
        // is bounded by the same timeout as the initial connection.
        raw = await resp.text();
    }
    catch (err) {
        if (err instanceof DOMException && err.name === 'AbortError') {
            throw new ModelError(`Timeout calling ${url}`);
        }
        throw new ModelError(`Connection error calling ${url}: ${err}`);
    }
    finally {
        clearTimeout(timer);
    }
    if (!resp.ok) {
        throw new ModelError(`HTTP ${resp.status} calling ${url}: ${raw}`);
    }
    let parsed;
    try {
        parsed = JSON.parse(raw);
    }
    catch {
        throw new ModelError(`Non-JSON response from ${url}: ${raw.slice(0, 500)}`);
    }
    if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
        throw new ModelError(`Unexpected non-object JSON from ${url}: ${typeof parsed}`);
    }
    return parsed;
}
105
+ // ---------------------------------------------------------------------------
106
+ // SSE reader (from a Node.js IncomingMessage stream)
107
+ // ---------------------------------------------------------------------------
108
/**
 * Parse raw SSE lines into an ordered list of `[eventName, data]` pairs.
 *
 * Recognizes `event:` and `data:` fields; a blank line terminates the
 * current SSE message. Multi-line data is joined with '\n' before JSON
 * parsing; unparseable data is wrapped as `{ _raw: <joined text> }`.
 * A literal `[DONE]` data line stops processing (any data buffered before
 * it is still flushed). Non-object / array JSON payloads are silently
 * dropped. `onSseEvent` (if given) is invoked per event; its exceptions
 * are swallowed so a faulty callback cannot break stream parsing.
 *
 * @throws {ModelError} when an Anthropic-style `{"type":"error"}` event
 *   appears in the stream.
 */
function _readSseEvents(lines, onSseEvent) {
    const events = [];
    let pendingEvent = '';
    let pendingData = [];
    const emit = () => {
        if (pendingData.length === 0) {
            return;
        }
        const joined = pendingData.join('\n');
        let payload;
        try {
            payload = JSON.parse(joined);
        }
        catch {
            payload = { _raw: joined };
        }
        if (typeof payload === 'object' && payload !== null && !Array.isArray(payload)) {
            if (payload['type'] === 'error') {
                const errObj = payload['error'];
                const errMsg = typeof errObj === 'object' && errObj !== null
                    ? errObj['message'] ?? JSON.stringify(payload)
                    : JSON.stringify(payload);
                throw new ModelError(`Stream error: ${errMsg}`);
            }
            events.push([pendingEvent, payload]);
            if (onSseEvent) {
                try {
                    onSseEvent(pendingEvent, payload);
                }
                catch {
                    // Callback failures must never kill the stream parse.
                }
            }
        }
        pendingData = [];
        pendingEvent = '';
    };
    for (const line of lines) {
        if (line === '') {
            // Blank line = end of one SSE message.
            emit();
        }
        else if (line.startsWith('event:')) {
            pendingEvent = line.slice(6).trim();
        }
        else if (line.startsWith('data:')) {
            const body = line.slice(5).trim();
            if (body === '[DONE]') {
                break;
            }
            pendingData.push(body);
        }
    }
    // Flush trailing data — some servers omit the final blank line.
    emit();
    return events;
}
168
+ // ---------------------------------------------------------------------------
169
+ // SSE streaming via Node http/https modules
170
+ // ---------------------------------------------------------------------------
171
/**
 * Send one request via Node's http/https modules and resolve with the raw
 * response lines ('\n'-split, trailing '\r' stripped) for SSE parsing.
 *
 * Timeouts: `firstByteTimeout` (seconds) bounds the wait for response
 * headers; once headers arrive, the socket timeout is extended to
 * `streamTimeout` (seconds) for the body.
 *
 * Rejection contract: HTTP >= 400 rejects with a ModelError (so the retry
 * wrapper does NOT retry it); request timeout rejects with a plain
 * `Error('timeout')` and other socket errors reject with the raw error
 * (both retried by `_httpStreamSse`).
 *
 * @param {string} url - Target URL (http or https).
 * @param {string} method - HTTP method.
 * @param {Record<string,string>} headers - Extra headers; merged over the
 *   default User-Agent and computed Content-Length (caller headers win).
 * @param {unknown} payload - Body, JSON-encoded unconditionally.
 * @returns {Promise<string[]>} All response lines after the stream ends.
 */
function _httpStreamSseRaw(url, method, headers, payload, firstByteTimeout, streamTimeout) {
    return new Promise((resolve, reject) => {
        const parsed = new URL(url);
        const isHttps = parsed.protocol === 'https:';
        const transport = isHttps ? https : http;
        const data = JSON.stringify(payload);
        const merged = {
            'User-Agent': _DEFAULT_USER_AGENT,
            // Explicit Content-Length since we write the whole body at once.
            'Content-Length': String(Buffer.byteLength(data, 'utf-8')),
            ...headers,
        };
        const reqOpts = {
            hostname: parsed.hostname,
            port: parsed.port || (isHttps ? 443 : 80),
            path: parsed.pathname + parsed.search,
            method,
            headers: merged,
            // Socket timeout until response headers arrive.
            timeout: firstByteTimeout * 1000,
        };
        const req = transport.request(reqOpts, (resp) => {
            // Error statuses: buffer the whole body into the error message.
            const statusCode = resp.statusCode ?? 0;
            if (statusCode >= 400) {
                let body = '';
                resp.setEncoding('utf-8');
                resp.on('data', (chunk) => { body += chunk; });
                resp.on('end', () => {
                    reject(new ModelError(`HTTP ${statusCode} calling ${url}: ${body}`));
                });
                return;
            }
            // Extend the timeout for reading the (potentially long) stream.
            // NOTE(review): resp.setTimeout is called without a handler —
            // presumably relying on req's 'timeout' listener; confirm a stalled
            // body actually rejects rather than hanging.
            resp.setTimeout(streamTimeout * 1000);
            req.setTimeout(streamTimeout * 1000);
            const lines = [];
            let buffer = '';
            resp.setEncoding('utf-8');
            resp.on('data', (chunk) => {
                buffer += chunk;
                const parts = buffer.split('\n');
                // Keep the last (possibly incomplete) part in the buffer.
                buffer = parts.pop() ?? '';
                for (const part of parts) {
                    lines.push(part.replace(/\r$/, ''));
                }
            });
            resp.on('end', () => {
                // Flush any remaining partial line.
                if (buffer.length > 0) {
                    lines.push(buffer.replace(/\r$/, ''));
                }
                resolve(lines);
            });
            resp.on('error', (err) => {
                reject(err);
            });
        });
        req.on('timeout', () => {
            req.destroy();
            // Plain Error (not ModelError) so the caller's retry loop retries.
            reject(new Error('timeout'));
        });
        req.on('error', (err) => {
            reject(err);
        });
        req.write(data);
        req.end();
    });
}
239
/**
 * POST `payload` to `url`, collect the complete SSE stream, and parse it
 * into `[eventName, data]` pairs, retrying transient failures.
 *
 * ModelError rejections (HTTP-level failures or in-stream error events)
 * are surfaced immediately without retry; timeouts and connection errors
 * are retried up to `maxRetries` times.
 *
 * @throws {ModelError} on a non-retryable failure, or once all attempts
 *   are exhausted (wrapping the last transient error).
 */
async function _httpStreamSse(url, method, headers, payload, firstByteTimeout = 10, streamTimeout = 120, maxRetries = 3, onSseEvent) {
    let lastFailure = null;
    let remaining = maxRetries;
    while (remaining > 0) {
        remaining -= 1;
        try {
            const rawLines = await _httpStreamSseRaw(url, method, headers, payload, firstByteTimeout, streamTimeout);
            return _readSseEvents(rawLines, onSseEvent);
        }
        catch (err) {
            if (err instanceof ModelError) {
                // HTTP errors are not transient; rethrow without retrying.
                throw err;
            }
            // Timeout or connection error — remember it and try again.
            lastFailure = err;
        }
    }
    throw new ModelError(`Timed out after ${maxRetries} attempts calling ${url}: ${lastFailure}`);
}
258
+ // ---------------------------------------------------------------------------
259
+ // SSE stream accumulators
260
+ // ---------------------------------------------------------------------------
261
/**
 * Fold streamed OpenAI chat-completion SSE chunks into a single
 * non-streaming response shape:
 * `{ choices: [{ message, finish_reason }], usage? }`.
 *
 * Text deltas are concatenated; tool-call deltas are merged by their
 * `index` (id overwritten when present, argument fragments appended).
 * `message.content` is null when no text arrived, and
 * `message.tool_calls` is null when no tool calls arrived. `usage` is
 * only attached when some chunk carried it.
 */
export function _accumulateOpenaiStream(events) {
    const pieces = [];
    const partialCalls = new Map();
    let finish = '';
    let usageInfo = {};
    for (const [, chunk] of events) {
        // Usage may arrive in a dedicated chunk or alongside the last delta.
        if (chunk['usage'] && typeof chunk['usage'] === 'object') {
            usageInfo = chunk['usage'];
        }
        const choiceList = chunk['choices'];
        if (!Array.isArray(choiceList) || choiceList.length === 0) {
            continue;
        }
        const first = choiceList[0];
        const reason = first['finish_reason'];
        if (reason && typeof reason === 'string') {
            finish = reason;
        }
        const delta = first['delta'] ?? {};
        if (!delta || typeof delta !== 'object') {
            continue;
        }
        // Text content.
        const fragment = delta['content'];
        if (typeof fragment === 'string' && fragment) {
            pieces.push(fragment);
        }
        // Tool calls stream incrementally, keyed by index.
        const callDeltas = delta['tool_calls'];
        if (!Array.isArray(callDeltas)) {
            continue;
        }
        for (const cd of callDeltas) {
            const slot = typeof cd['index'] === 'number' ? cd['index'] : 0;
            let entry = partialCalls.get(slot);
            if (entry === undefined) {
                entry = {
                    id: cd['id'] ?? '',
                    type: 'function',
                    function: { name: '', arguments: '' },
                };
                partialCalls.set(slot, entry);
            }
            if (cd['id'] && typeof cd['id'] === 'string') {
                entry['id'] = cd['id'];
            }
            const fnDelta = cd['function'] ?? {};
            if (fnDelta['name'] && typeof fnDelta['name'] === 'string') {
                entry['function']['name'] = fnDelta['name'];
            }
            if (fnDelta['arguments'] && typeof fnDelta['arguments'] === 'string') {
                entry['function']['arguments'] += fnDelta['arguments'];
            }
        }
    }
    // Assemble the final assistant message.
    const message = {
        role: 'assistant',
        content: pieces.length > 0 ? pieces.join('') : null,
    };
    message['tool_calls'] = partialCalls.size > 0
        ? [...partialCalls.keys()].sort((x, y) => x - y).map((k) => partialCalls.get(k))
        : null;
    const result = {
        choices: [{ message, finish_reason: finish }],
    };
    if (Object.keys(usageInfo).length > 0) {
        result['usage'] = usageInfo;
    }
    return result;
}
335
/**
 * Fold streamed Anthropic Messages SSE events into a single non-streaming
 * response shape: `{ content: [...blocks], stop_reason, usage }`.
 *
 * Content blocks are tracked by stream index: text and thinking blocks
 * accumulate their deltas, tool_use blocks accumulate partial JSON into a
 * private `_input_json` scratch key that is parsed into `input` at
 * `content_block_stop` (and always stripped from the output). Usage is
 * merged from `message_start` and `message_delta`; `stop_reason` comes
 * from `message_delta`. Unknown events (including `message_stop`) are
 * ignored.
 */
export function _accumulateAnthropicStream(events) {
    const blocks = new Map();
    let stop = '';
    let usageInfo = {};
    for (const [eventType, data] of events) {
        const kind = data['type'] ?? eventType;
        switch (kind) {
            case 'message_start': {
                const startUsage = (data['message'] ?? {})['usage'];
                if (startUsage && typeof startUsage === 'object') {
                    usageInfo = { ...usageInfo, ...startUsage };
                }
                break;
            }
            case 'content_block_start': {
                const slot = typeof data['index'] === 'number' ? data['index'] : blocks.size;
                const raw = data['content_block'] ?? {};
                switch (raw['type'] ?? 'text') {
                    case 'text':
                        blocks.set(slot, { type: 'text', text: raw['text'] ?? '' });
                        break;
                    case 'tool_use':
                        blocks.set(slot, {
                            type: 'tool_use',
                            id: raw['id'] ?? '',
                            name: raw['name'] ?? '',
                            input: {},
                            _input_json: '',
                        });
                        break;
                    case 'thinking':
                        blocks.set(slot, { type: 'thinking', thinking: raw['thinking'] ?? '' });
                        break;
                    default:
                        // Unknown block type: keep a shallow copy as-is.
                        blocks.set(slot, { ...raw });
                }
                break;
            }
            case 'content_block_delta': {
                const slot = typeof data['index'] === 'number' ? data['index'] : 0;
                const target = blocks.get(slot);
                if (target == null) {
                    break;
                }
                const delta = data['delta'] ?? {};
                switch (delta['type'] ?? '') {
                    case 'text_delta':
                        target['text'] = (target['text'] ?? '') + (delta['text'] ?? '');
                        break;
                    case 'input_json_delta':
                        target['_input_json'] = (target['_input_json'] ?? '') + (delta['partial_json'] ?? '');
                        break;
                    case 'thinking_delta':
                        target['thinking'] = (target['thinking'] ?? '') + (delta['thinking'] ?? '');
                        break;
                    case 'signature_delta':
                        // Signatures replace rather than accumulate.
                        target['signature'] = delta['signature'] ?? '';
                        break;
                }
                break;
            }
            case 'content_block_stop': {
                const slot = typeof data['index'] === 'number' ? data['index'] : 0;
                const target = blocks.get(slot);
                if (target && target['type'] === 'tool_use') {
                    const pendingJson = target['_input_json'] ?? '';
                    delete target['_input_json'];
                    if (pendingJson) {
                        try {
                            target['input'] = JSON.parse(pendingJson);
                        }
                        catch {
                            target['input'] = {};
                        }
                    }
                }
                break;
            }
            case 'message_delta': {
                const delta = data['delta'] ?? {};
                if (delta['stop_reason'] && typeof delta['stop_reason'] === 'string') {
                    stop = delta['stop_reason'];
                }
                const deltaUsage = data['usage'];
                if (deltaUsage && typeof deltaUsage === 'object') {
                    usageInfo = { ...usageInfo, ...deltaUsage };
                }
                break;
            }
            // message_stop and anything else: nothing to do.
        }
    }
    // Emit blocks in index order, stripping any leftover scratch key.
    const content = [...blocks.keys()]
        .sort((x, y) => x - y)
        .map((slot) => {
            const finished = blocks.get(slot);
            delete finished['_input_json'];
            return finished;
        });
    return {
        content,
        stop_reason: stop,
        usage: usageInfo,
    };
}
445
+ // ---------------------------------------------------------------------------
446
+ // Utility functions
447
+ // ---------------------------------------------------------------------------
448
/**
 * Best-effort conversion of a provider timestamp value to Unix seconds.
 *
 * Accepts a number (floored), a digits-only string (parsed as an integer),
 * or an ISO-8601-like date string (parsed via `Date`). Anything else —
 * including blank strings, unparseable dates, and non-string/non-number
 * values — yields 0 so callers get a stable sort key.
 *
 * Fix: non-finite numbers (NaN/Infinity) previously passed straight
 * through `Math.floor`, poisoning `_sortedModels` comparisons; they now
 * fall back to 0 like every other unusable input.
 *
 * @param {unknown} value - Raw timestamp field from a provider response.
 * @returns {number} Unix seconds, or 0 when the value is unusable.
 */
export function _parseTimestamp(value) {
    if (typeof value === 'number') {
        // Guard NaN/Infinity — Math.floor would propagate them.
        return Number.isFinite(value) ? Math.floor(value) : 0;
    }
    if (typeof value === 'string') {
        const text = value.trim();
        if (!text)
            return 0;
        if (/^\d+$/.test(text))
            return parseInt(text, 10);
        // Try ISO date parsing ('Z' normalized to an explicit offset).
        const isoCandidate = text.replace('Z', '+00:00');
        const dt = new Date(isoCandidate);
        if (!isNaN(dt.getTime())) {
            return Math.floor(dt.getTime() / 1000);
        }
        return 0;
    }
    return 0;
}
468
/**
 * Return a new array of model rows sorted newest-first by `created_ts`
 * (missing/non-numeric timestamps sort as 0), breaking ties by `id` in
 * reverse lexicographic order. The input array is not mutated.
 */
export function _sortedModels(models) {
    const byRecency = (left, right) => {
        const leftTs = typeof left['created_ts'] === 'number' ? left['created_ts'] : 0;
        const rightTs = typeof right['created_ts'] === 'number' ? right['created_ts'] : 0;
        if (leftTs !== rightTs) {
            return rightTs - leftTs;
        }
        return String(right['id'] ?? '').localeCompare(String(left['id'] ?? ''));
    };
    return models.slice().sort(byRecency);
}
479
+ // ---------------------------------------------------------------------------
480
+ // Model listing functions
481
+ // ---------------------------------------------------------------------------
482
/**
 * Fetch the model catalog from an OpenAI-compatible `/models` endpoint and
 * return normalized rows (`{ provider, id, created_ts, raw }`) sorted
 * newest-first via `_sortedModels`. Rows without a usable id are skipped.
 *
 * @throws {ModelError} propagated from `_httpJson` on any HTTP/parse failure.
 */
export async function listOpenaiModels(apiKey, baseUrl = 'https://api.openai.com/v1', timeoutSec = 60) {
    const endpoint = baseUrl.replace(/\/+$/, '') + '/models';
    const parsed = await _httpJson(endpoint, 'GET', {
        Authorization: `Bearer ${apiKey}`,
        'Content-Type': 'application/json',
    }, null, timeoutSec);
    const data = parsed['data'];
    const entries = Array.isArray(data) ? data : [];
    const rows = [];
    for (const entry of entries) {
        if (typeof entry !== 'object' || entry === null) {
            continue;
        }
        const modelId = String(entry['id'] ?? '').trim();
        if (modelId === '') {
            continue;
        }
        rows.push({
            provider: 'openai',
            id: modelId,
            created_ts: _parseTimestamp(entry['created'] ?? entry['created_at']),
            raw: entry,
        });
    }
    return _sortedModels(rows);
}
508
/**
 * Fetch the model catalog from the Anthropic `/models` endpoint and return
 * normalized rows (`{ provider, id, created_ts, raw }`) sorted newest-first
 * via `_sortedModels`. Rows without a usable id are skipped.
 *
 * @throws {ModelError} propagated from `_httpJson` on any HTTP/parse failure.
 */
export async function listAnthropicModels(apiKey, baseUrl = 'https://api.anthropic.com/v1', timeoutSec = 60) {
    const endpoint = baseUrl.replace(/\/+$/, '') + '/models';
    const parsed = await _httpJson(endpoint, 'GET', {
        'x-api-key': apiKey,
        'anthropic-version': '2023-06-01',
        'content-type': 'application/json',
    }, null, timeoutSec);
    const data = parsed['data'];
    const entries = Array.isArray(data) ? data : [];
    const rows = [];
    for (const entry of entries) {
        if (typeof entry !== 'object' || entry === null) {
            continue;
        }
        const modelId = String(entry['id'] ?? '').trim();
        if (modelId === '') {
            continue;
        }
        rows.push({
            provider: 'anthropic',
            id: modelId,
            created_ts: _parseTimestamp(entry['created_at'] ?? entry['created'] ?? entry['released_at']),
            raw: entry,
        });
    }
    return _sortedModels(rows);
}
535
/**
 * Fetch the model catalog from the OpenRouter `/models` endpoint and return
 * normalized rows (`{ provider, id, created_ts, raw }`) sorted newest-first
 * via `_sortedModels`. The creation timestamp falls back through several
 * fields: `created`, `created_at`, `top_provider.created`, `updated_at`.
 *
 * @throws {ModelError} propagated from `_httpJson` on any HTTP/parse failure.
 */
export async function listOpenrouterModels(apiKey, baseUrl = 'https://openrouter.ai/api/v1', timeoutSec = 60) {
    const endpoint = baseUrl.replace(/\/+$/, '') + '/models';
    const parsed = await _httpJson(endpoint, 'GET', {
        Authorization: `Bearer ${apiKey}`,
        'Content-Type': 'application/json',
    }, null, timeoutSec);
    const data = parsed['data'];
    const entries = Array.isArray(data) ? data : [];
    const rows = [];
    for (const entry of entries) {
        if (typeof entry !== 'object' || entry === null) {
            continue;
        }
        const modelId = String(entry['id'] ?? '').trim();
        if (modelId === '') {
            continue;
        }
        const topProvider = entry['top_provider'];
        const nestedCreated = typeof topProvider === 'object' && topProvider !== null
            ? topProvider['created']
            : null;
        rows.push({
            provider: 'openrouter',
            id: modelId,
            created_ts: _parseTimestamp(entry['created'] ??
                entry['created_at'] ??
                nestedCreated ??
                entry['updated_at']),
            raw: entry,
        });
    }
    return _sortedModels(rows);
}
568
+ // ---------------------------------------------------------------------------
569
+ // OpenAI-compatible model (native tool calling)
570
+ // ---------------------------------------------------------------------------
571
/**
 * Chat-completions client for OpenAI-compatible endpoints with native tool
 * calling, driven over streaming SSE (`stream: true`).
 *
 * Responsibilities visible here: building the request payload (with
 * per-model-family quirks for temperature and reasoning effort), streaming
 * the response via `_httpStreamSse`, accumulating it with
 * `_accumulateOpenaiStream`, and maintaining the provider message list on
 * a `Conversation`.
 */
export class OpenAICompatibleModel {
    model;           // Model identifier sent in the payload.
    apiKey;          // Bearer token for the Authorization header.
    baseUrl;         // API root; '/chat/completions' is appended per call.
    temperature;     // Sampling temperature (omitted for reasoning models).
    reasoningEffort; // Optional effort string; null disables the parameter.
    timeoutSec;      // Stream timeout passed to _httpStreamSse.
    extraHeaders;    // Extra HTTP headers merged over the defaults.
    strictTools;     // Forwarded to to_openai_tools (strict JSON schemas).
    toolDefs;        // Optional tool definitions; null = module defaults.
    onContentDelta;  // Optional callback fed streamed text deltas.
    /**
     * @param {object} opts - Settings; only `model` and `apiKey` are
     *   required, everything else has a default (see field comments).
     */
    constructor(opts) {
        this.model = opts.model;
        this.apiKey = opts.apiKey;
        this.baseUrl = opts.baseUrl ?? 'https://api.openai.com/v1';
        this.temperature = opts.temperature ?? 0.0;
        this.reasoningEffort = opts.reasoningEffort ?? null;
        this.timeoutSec = opts.timeoutSec ?? 300;
        this.extraHeaders = opts.extraHeaders ?? {};
        this.strictTools = opts.strictTools ?? true;
        this.toolDefs = opts.toolDefs ?? null;
        this.onContentDelta = opts.onContentDelta ?? null;
    }
    /**
     * Heuristic: does the configured model name look like an OpenAI
     * reasoning model (o1/o3/o4 family or gpt-5*)? Used to decide whether
     * `temperature` may be sent.
     */
    _isReasoningModel() {
        const lower = this.model.toLowerCase();
        if (lower.startsWith('o1-') || lower === 'o1' ||
            lower.startsWith('o3-') || lower === 'o3' ||
            lower.startsWith('o4-') || lower === 'o4') {
            return true;
        }
        if (lower.startsWith('gpt-5')) {
            return true;
        }
        return false;
    }
    /**
     * Start a new conversation seeded with a system message followed by the
     * first user message.
     */
    createConversation(systemPrompt, initialUserMessage) {
        const messages = [
            { role: 'system', content: systemPrompt },
            { role: 'user', content: initialUserMessage },
        ];
        return new Conversation({ _provider_messages: messages, system_prompt: systemPrompt });
    }
    /**
     * Run one model turn over the conversation's messages.
     *
     * Streams the response (forwarding text deltas to `onContentDelta` as
     * ('text', chunk) when set), then returns a normalized turn:
     * `{ tool_calls, text, stop_reason, raw_response, input_tokens,
     * output_tokens }`. If the endpoint rejects `reasoning_effort`, the
     * request is retried once without it.
     *
     * @throws {ModelError} on transport failure or a response missing choices.
     */
    async complete(conversation) {
        const isReasoning = this._isReasoningModel();
        const payload = {
            model: this.model,
            messages: conversation._provider_messages,
            tools: to_openai_tools(this.toolDefs ?? undefined, this.strictTools),
            tool_choice: 'auto',
            stream: true,
            stream_options: { include_usage: true },
        };
        if (conversation.stop_sequences.length > 0) {
            payload['stop'] = conversation.stop_sequences;
        }
        // Reasoning models (o-series) don't support temperature.
        if (!isReasoning) {
            payload['temperature'] = this.temperature;
        }
        // Chat Completions API uses flat `reasoning_effort` (not the nested format).
        // xAI Grok models do not support this parameter.
        const isGrok = this.model.toLowerCase().startsWith('grok');
        const effort = (this.reasoningEffort ?? '').trim().toLowerCase();
        if (effort && !isGrok) {
            payload['reasoning_effort'] = effort;
        }
        const url = this.baseUrl.replace(/\/+$/, '') + '/chat/completions';
        const headers = {
            Authorization: `Bearer ${this.apiKey}`,
            'Content-Type': 'application/json',
            ...this.extraHeaders,
        };
        // Build SSE event forwarder for streaming text deltas to the caller
        // (e.g. a TUI); only text content is forwarded, not tool-call deltas.
        const cb = this.onContentDelta;
        const forwardDelta = cb
            ? (_eventType, data) => {
                const choices = data['choices'];
                if (!choices || !Array.isArray(choices) || choices.length === 0)
                    return;
                const delta = (choices[0]['delta'] ?? {});
                if (!delta || typeof delta !== 'object')
                    return;
                const content = delta['content'];
                if (typeof content === 'string' && content) {
                    cb('text', content);
                }
            }
            : null;
        let parsed;
        try {
            const events = await _httpStreamSse(url, 'POST', headers, payload, 10, this.timeoutSec, 3, forwardDelta);
            parsed = _accumulateOpenaiStream(events);
        }
        catch (exc) {
            if (!(exc instanceof ModelError))
                throw exc;
            // Detect "this endpoint doesn't know reasoning_effort" errors by
            // message text; anything else is rethrown unchanged.
            const text = exc.message.toLowerCase();
            const unsupportedReasoning = effort &&
                (text.includes('reasoning_effort') || text.includes('reasoningeffort')) &&
                (text.includes('unsupported_parameter') || text.includes('unknown') || text.includes('invalid argument'));
            if (!unsupportedReasoning)
                throw exc;
            // Retry once without reasoning_effort.
            const retryPayload = { ...payload };
            delete retryPayload['reasoning_effort'];
            const events = await _httpStreamSse(url, 'POST', headers, retryPayload, 10, this.timeoutSec, 3, forwardDelta);
            parsed = _accumulateOpenaiStream(events);
        }
        const choices = parsed['choices'];
        if (!choices || !Array.isArray(choices) || choices.length === 0) {
            throw new ModelError(`Model response missing content: ${JSON.stringify(parsed)}`);
        }
        const choice = choices[0];
        const message = choice['message'];
        if (!message) {
            throw new ModelError(`Model response missing content: ${JSON.stringify(parsed)}`);
        }
        const finishReason = choice['finish_reason'] ?? '';
        // Parse tool calls; unparseable argument JSON degrades to {} rather
        // than failing the whole turn.
        const rawToolCalls = message['tool_calls'];
        const toolCalls = [];
        if (rawToolCalls && Array.isArray(rawToolCalls)) {
            for (const tc of rawToolCalls) {
                const tcRec = tc;
                const func = (tcRec['function'] ?? {});
                const argsStr = func['arguments'] ?? '{}';
                let args;
                try {
                    args = JSON.parse(argsStr);
                }
                catch {
                    args = {};
                }
                toolCalls.push({
                    id: tcRec['id'] ?? '',
                    name: func['name'] ?? '',
                    arguments: typeof args === 'object' && args !== null && !Array.isArray(args)
                        ? args
                        : {},
                });
            }
        }
        // Extract text content; whitespace-only text is normalized to null.
        let textContent = _extractContent(message['content'] ?? '') || null;
        if (textContent !== null && !textContent.trim()) {
            textContent = null;
        }
        // Extract token usage (0 when the provider sent none).
        const usageObj = parsed['usage'];
        const inputTokens = typeof usageObj === 'object' && usageObj !== null
            ? usageObj['prompt_tokens'] ?? 0
            : 0;
        const outputTokens = typeof usageObj === 'object' && usageObj !== null
            ? usageObj['completion_tokens'] ?? 0
            : 0;
        return {
            tool_calls: toolCalls,
            text: textContent,
            stop_reason: finishReason,
            raw_response: message,
            input_tokens: inputTokens,
            output_tokens: outputTokens,
        };
    }
    /**
     * Append the assistant's turn to the conversation by replaying the raw
     * OpenAI message object (preserves the provider's tool_calls array) and
     * bump the turn counter.
     */
    appendAssistantTurn(conversation, turn) {
        // Replay the raw OpenAI message object to preserve tool_calls array
        conversation._provider_messages.push(turn.raw_response);
        conversation.turn_count += 1;
    }
    /**
     * Append one `role: 'tool'` message per tool result, each carrying the
     * originating tool_call_id, tool name, and string content.
     */
    appendToolResults(conversation, results) {
        for (const r of results) {
            conversation._provider_messages.push({
                role: 'tool',
                tool_call_id: r.tool_call_id,
                name: r.name,
                content: r.content,
            });
        }
    }
    /**
     * Shrink context by replacing the content of all but the most recent
     * `keepRecentTurns` tool-role messages with a fixed placeholder string.
     * Already-condensed messages are skipped.
     *
     * @returns {number} Count of messages condensed by this call.
     */
    condenseConversation(conversation, keepRecentTurns = 4) {
        const msgs = conversation._provider_messages;
        // Find indices of tool-role messages.
        const toolIndices = [];
        for (let i = 0; i < msgs.length; i++) {
            const m = msgs[i];
            if (typeof m === 'object' && m !== null && m['role'] === 'tool') {
                toolIndices.push(i);
            }
        }
        if (toolIndices.length <= keepRecentTurns) {
            return 0;
        }
        const toCondense = toolIndices.slice(0, -keepRecentTurns);
        let condensed = 0;
        const placeholder = '[earlier tool output condensed]';
        for (const idx of toCondense) {
            const msg = msgs[idx];
            if (msg['content'] !== placeholder) {
                msg['content'] = placeholder;
                condensed += 1;
            }
        }
        return condensed;
    }
}
776
+ // ---------------------------------------------------------------------------
777
+ // Anthropic model (native tool calling)
778
+ // ---------------------------------------------------------------------------
779
/**
 * Anthropic Messages API adapter with native tool calling, SSE streaming,
 * and optional extended-thinking ("reasoning") support.
 */
export class AnthropicModel {
    model;
    apiKey;
    baseUrl;
    temperature;
    reasoningEffort;
    maxTokens;
    timeoutSec;
    toolDefs;
    onContentDelta;
    /**
     * @param {object} opts
     * @param {string} opts.model - Anthropic model id (e.g. "claude-opus-4-6").
     * @param {string} opts.apiKey - API key sent via the x-api-key header.
     * @param {string} [opts.baseUrl] - API root; defaults to the public Anthropic endpoint.
     * @param {number} [opts.temperature=0] - Sampling temperature (omitted when thinking is enabled).
     * @param {?string} [opts.reasoningEffort] - 'low' | 'medium' | 'high' turns thinking on; anything else disables it.
     * @param {number} [opts.maxTokens=16384] - max_tokens sent with each request.
     * @param {number} [opts.timeoutSec=300] - Per-request timeout in seconds.
     * @param {?object[]} [opts.toolDefs] - Provider-agnostic tool definitions, converted per request.
     * @param {?Function} [opts.onContentDelta] - (kind, text) callback receiving streamed 'thinking'/'text' deltas.
     */
    constructor(opts) {
        this.model = opts.model;
        this.apiKey = opts.apiKey;
        this.baseUrl = opts.baseUrl ?? 'https://api.anthropic.com/v1';
        this.temperature = opts.temperature ?? 0.0;
        this.reasoningEffort = opts.reasoningEffort ?? null;
        this.maxTokens = opts.maxTokens ?? 16384;
        this.timeoutSec = opts.timeoutSec ?? 300;
        this.toolDefs = opts.toolDefs ?? null;
        this.onContentDelta = opts.onContentDelta ?? null;
    }
    /** Start a new conversation seeded with the initial user message. */
    createConversation(systemPrompt, initialUserMessage) {
        const messages = [
            { role: 'user', content: initialUserMessage },
        ];
        return new Conversation({ _provider_messages: messages, system_prompt: systemPrompt });
    }
    /** True for Opus 4.6 model ids, which use adaptive (not manual) thinking. */
    _isOpus46() {
        const lower = this.model.toLowerCase();
        return lower.includes('opus-4-6') || lower.includes('opus-4.6');
    }
    /**
     * Send the conversation to the Messages API (streaming) and return a
     * normalized turn: { tool_calls, text, stop_reason, raw_response,
     * input_tokens, output_tokens }.
     *
     * If the API rejects the thinking parameters, retries once without them.
     * @throws {ModelError} (from the HTTP/stream helpers) when the request
     *   fails for any other reason.
     */
    async complete(conversation) {
        const effort = (this.reasoningEffort ?? '').trim().toLowerCase();
        const useThinking = ['low', 'medium', 'high'].includes(effort);
        const payload = {
            model: this.model,
            max_tokens: this.maxTokens,
            messages: conversation._provider_messages,
            tools: to_anthropic_tools(this.toolDefs ?? undefined),
            stream: true,
        };
        if (conversation.stop_sequences.length > 0) {
            payload['stop_sequences'] = conversation.stop_sequences;
        }
        // Thinking is incompatible with temperature — omit it entirely.
        if (!useThinking) {
            payload['temperature'] = this.temperature;
        }
        if (useThinking) {
            if (this._isOpus46()) {
                // Opus 4.6: adaptive thinking (manual mode deprecated).
                payload['thinking'] = { type: 'adaptive' };
                payload['output_config'] = { effort };
            }
            else {
                // Older models: manual thinking with explicit budget.
                const budgetMap = { low: 1024, medium: 4096, high: 8192 };
                const budget = budgetMap[effort];
                // max_tokens must exceed the thinking budget or the API rejects it.
                if (payload['max_tokens'] <= budget) {
                    payload['max_tokens'] = budget + 8192;
                }
                payload['thinking'] = { type: 'enabled', budget_tokens: budget };
            }
        }
        if (conversation.system_prompt) {
            payload['system'] = conversation.system_prompt;
        }
        const url = this.baseUrl.replace(/\/+$/, '') + '/messages';
        const headers = {
            'x-api-key': this.apiKey,
            'anthropic-version': '2023-06-01',
            'content-type': 'application/json',
        };
        // Build SSE event forwarder for streaming deltas to TUI
        const cb = this.onContentDelta;
        const forwardDelta = cb
            ? (_eventType, data) => {
                const msgType = data['type'] ?? _eventType;
                if (msgType !== 'content_block_delta')
                    return;
                const delta = (data['delta'] ?? {});
                const deltaType = delta['type'] ?? '';
                if (deltaType === 'thinking_delta') {
                    const text = delta['thinking'] ?? '';
                    if (text)
                        cb('thinking', text);
                }
                else if (deltaType === 'text_delta') {
                    const text = delta['text'] ?? '';
                    if (text)
                        cb('text', text);
                }
            }
            : null;
        let parsed;
        try {
            const events = await _httpStreamSse(url, 'POST', headers, payload, 10, this.timeoutSec, 3, forwardDelta);
            parsed = _accumulateAnthropicStream(events);
        }
        catch (exc) {
            if (!(exc instanceof ModelError))
                throw exc;
            // Heuristic: detect "thinking unsupported" rejections from the
            // error text and retry once without thinking parameters.
            const text = exc.message.toLowerCase();
            const unsupportedThinking = useThinking &&
                text.includes('thinking') &&
                (text.includes('unknown') || text.includes('unsupported') || text.includes('invalid'));
            if (!unsupportedThinking)
                throw exc;
            // Retry without thinking
            const retryPayload = { ...payload };
            delete retryPayload['thinking'];
            delete retryPayload['output_config'];
            const events = await _httpStreamSse(url, 'POST', headers, retryPayload, 10, this.timeoutSec, 3, forwardDelta);
            parsed = _accumulateAnthropicStream(events);
        }
        const stopReason = parsed['stop_reason'] ?? '';
        let contentBlocks = parsed['content'];
        if (!Array.isArray(contentBlocks)) {
            contentBlocks = [];
        }
        // Parse content blocks
        const toolCalls = [];
        const textParts = [];
        for (const block of contentBlocks) {
            if (typeof block !== 'object' || block === null)
                continue;
            const blockRec = block;
            const blockType = blockRec['type'] ?? '';
            if (blockType === 'tool_use') {
                const inputVal = blockRec['input'];
                toolCalls.push({
                    id: blockRec['id'] ?? '',
                    name: blockRec['name'] ?? '',
                    // Guard: tool input must be a plain object, never an array/scalar.
                    arguments: typeof inputVal === 'object' && inputVal !== null && !Array.isArray(inputVal)
                        ? inputVal
                        : {},
                });
            }
            else if (blockType === 'text') {
                const t = blockRec['text'];
                if (typeof t === 'string' && t.trim()) {
                    textParts.push(t);
                }
            }
        }
        const textContent = textParts.length > 0 ? textParts.join('\n') : null;
        // Extract token usage
        const usageObj = parsed['usage'];
        const inputTokens = typeof usageObj === 'object' && usageObj !== null
            ? usageObj['input_tokens'] ?? 0
            : 0;
        const outputTokens = typeof usageObj === 'object' && usageObj !== null
            ? usageObj['output_tokens'] ?? 0
            : 0;
        return {
            tool_calls: toolCalls,
            text: textContent,
            stop_reason: stopReason,
            raw_response: contentBlocks,
            input_tokens: inputTokens,
            output_tokens: outputTokens,
        };
    }
    /** Replay the full content block array (including thinking blocks). */
    appendAssistantTurn(conversation, turn) {
        conversation._provider_messages.push({
            role: 'assistant',
            content: turn.raw_response,
        });
        conversation.turn_count += 1;
    }
    /** Append tool results as tool_result blocks inside a single user message. */
    appendToolResults(conversation, results) {
        const toolResultBlocks = [];
        for (const r of results) {
            const block = {
                type: 'tool_result',
                tool_use_id: r.tool_call_id,
                content: r.content,
            };
            if (r.is_error) {
                block['is_error'] = true;
            }
            toolResultBlocks.push(block);
        }
        conversation._provider_messages.push({
            role: 'user',
            content: toolResultBlocks,
        });
    }
    /**
     * Replace older tool_result contents with a short placeholder to shrink
     * the prompt, keeping the most recent `keepRecentTurns` untouched.
     *
     * @param {object} conversation - Conversation mutated in place.
     * @param {number} [keepRecentTurns=4] - Trailing tool-result messages to
     *   preserve; 0 condenses all of them.
     * @returns {number} Number of tool_result blocks condensed by this call.
     */
    condenseConversation(conversation, keepRecentTurns = 4) {
        const msgs = conversation._provider_messages;
        const placeholder = '[earlier tool output condensed]';
        // Find indices of user messages that contain tool_result blocks.
        const toolMsgIndices = [];
        for (let i = 0; i < msgs.length; i++) {
            const m = msgs[i];
            if (typeof m !== 'object' || m === null)
                continue;
            const rec = m;
            if (rec['role'] !== 'user')
                continue;
            const content = rec['content'];
            if (Array.isArray(content) &&
                content.some((b) => typeof b === 'object' &&
                    b !== null &&
                    b['type'] === 'tool_result')) {
                toolMsgIndices.push(i);
            }
        }
        if (toolMsgIndices.length <= keepRecentTurns) {
            return 0;
        }
        // Fix: slice(0, -0) selects nothing, so treat keepRecentTurns <= 0 as
        // "condense everything" instead of silently condensing nothing.
        const toCondense = keepRecentTurns > 0
            ? toolMsgIndices.slice(0, -keepRecentTurns)
            : toolMsgIndices.slice();
        let condensed = 0;
        for (const idx of toCondense) {
            const content = msgs[idx]['content'];
            if (!Array.isArray(content))
                continue;
            for (const block of content) {
                if (typeof block !== 'object' || block === null)
                    continue;
                const blockRec = block;
                if (blockRec['type'] !== 'tool_result')
                    continue;
                // Idempotent: skip blocks condensed on an earlier pass.
                if (blockRec['content'] !== placeholder) {
                    blockRec['content'] = placeholder;
                    condensed += 1;
                }
            }
        }
        return condensed;
    }
}
1012
+ // ---------------------------------------------------------------------------
1013
+ // Test / fallback models
1014
+ // ---------------------------------------------------------------------------
1015
+ /** Model that returns pre-scripted ModelTurn responses for testing. */
1016
/** Model that returns pre-scripted ModelTurn responses for testing. */
export class ScriptedModel {
    scriptedTurns;
    /** @param {object[]} [scriptedTurns=[]] - Turns handed out in order by complete(). */
    constructor(scriptedTurns = []) {
        // Defensive copy: later mutation of the caller's array must not
        // change the script.
        this.scriptedTurns = [...scriptedTurns];
    }
    /** Seed a conversation with the initial user message. */
    createConversation(systemPrompt, initialUserMessage) {
        const seed = [{ role: 'user', content: initialUserMessage }];
        return new Conversation({
            _provider_messages: seed,
            system_prompt: systemPrompt,
        });
    }
    /** Pop and return the next scripted turn; fail loudly once the script runs dry. */
    complete(_conversation) {
        if (!this.scriptedTurns.length) {
            throw new ModelError('ScriptedModel exhausted; no responses left.');
        }
        return this.scriptedTurns.shift();
    }
    appendAssistantTurn(_conversation, _turn) {
        // Intentionally empty: scripted playback keeps no history.
    }
    appendToolResults(_conversation, _results) {
        // Intentionally empty: scripted playback keeps no history.
    }
    condenseConversation(_conversation, _keepRecentTurns = 4) {
        return 0; // nothing is stored, so nothing can be condensed
    }
}
1043
/**
 * Stand-in model used when no provider API keys are configured; every
 * complete() call returns the same explanatory note as plain text.
 */
export class EchoFallbackModel {
    note;
    /** @param {string} [note] - Text returned as the model's only reply. */
    constructor(note = 'No provider API keys configured. Set OpenAI/Anthropic/OpenRouter keys to use a live LLM.') {
        this.note = note;
    }
    /** Seed a conversation with the initial user message. */
    createConversation(systemPrompt, initialUserMessage) {
        return new Conversation({
            _provider_messages: [{ role: 'user', content: initialUserMessage }],
            system_prompt: systemPrompt,
        });
    }
    /** Always returns the configured note: no tool calls, no token usage. */
    complete(_conversation) {
        return {
            tool_calls: [],
            text: this.note,
            stop_reason: 'end_turn',
            raw_response: null,
            input_tokens: 0,
            output_tokens: 0,
        };
    }
    appendAssistantTurn(_conversation, _turn) {
        // no-op
    }
    appendToolResults(_conversation, _results) {
        // no-op
    }
    /**
     * Added for interface parity with the other model implementations,
     * which all expose condenseConversation; there is no history here.
     * @returns {number} Always 0 (nothing condensed).
     */
    condenseConversation(_conversation, _keepRecentTurns = 4) {
        return 0;
    }
}
1071
+ //# sourceMappingURL=model.js.map