saico 2.2.3 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/context.js +3 -1211
- package/dynamo.js +227 -0
- package/index.js +7 -1
- package/msgs.js +1213 -0
- package/package.json +14 -1
- package/saico.js +345 -0
- package/sid.js +1 -1
package/msgs.js
ADDED
|
@@ -0,0 +1,1213 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const crypto = require('crypto');
|
|
4
|
+
const openai = require('./openai.js');
|
|
5
|
+
const util = require('./util.js');
|
|
6
|
+
|
|
7
|
+
const { _log, _lerr, _ldbg } = util;
|
|
8
|
+
const debug = 0;
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Context - Conversation context that can be attached to any Itask.
|
|
12
|
+
*
|
|
13
|
+
* Key differences from the old Messages class:
|
|
14
|
+
* - Uses task hierarchy instead of parent/child messages
|
|
15
|
+
* - task reference replaces parent reference
|
|
16
|
+
* - getMsgContext() traverses task hierarchy
|
|
17
|
+
* - _createMsgQ() aggregates from task ancestors
|
|
18
|
+
*/
|
|
19
|
+
class Context {
|
|
20
|
+
/**
 * Build a conversation context.
 * @param {string} prompt - System prompt for this context.
 * @param {Object} task - Owning Itask (replaces the old parent-message link).
 * @param {Object} [config] - Optional overrides; every field has a default.
 */
constructor(prompt, task, config = {}) {
    this.prompt = prompt;
    this.task = task; // Reference to owning Itask (replaces parent)
    // Stable identifier for logging/persistence; random when not supplied.
    this.tag = config.tag || crypto.randomBytes(4).toString('hex');
    // Token budget: summarization kicks in at lower_limit, chunked
    // summarization at upper_limit (see summarizeMessages/_summarizeMessages).
    this.token_limit = config.token_limit || 1000000000;
    this.lower_limit = this.token_limit * 0.85;
    this.upper_limit = this.token_limit * 0.98;
    // Fall back to the owning task's handler/functions when not given.
    this.tool_handler = config.tool_handler || task?.tool_handler;
    this.functions = config.functions || task?.functions || null;

    // Recursive depth and repetition control
    this.max_depth = config.max_depth || 5;
    this.max_tool_repetition = config.max_tool_repetition || 20;
    this._current_depth = 0;
    this._deferred_tool_calls = [];
    this._tool_call_sequence = [];

    // Chat history persistence
    this.chat_history = config.chat_history || null;

    this._msgs = [];                     // queue entries: {msg, opts, msgid, replied}
    this._waitingQueue = [];             // messages parked until tool calls settle
    this._active_tool_calls = new Map(); // tool-call key -> in-flight metadata

    // Sequential mode support
    this._sequential_queue = [];
    this._processing_sequential = false;
    this._sequential_mode = config.sequential_mode || false;

    // Queue structure limits (?? so an explicit 0 is honored)
    this.QUEUE_LIMIT = config.queue_limit ?? 30;
    this.TOOL_DIGEST_LIMIT = config.tool_digest_limit ?? 10;
    this.MIN_CHAT_MESSAGES = config.min_chat_messages ?? 10;

    // Tool digest — persistent history of tool calls that mutated task state
    this.tool_digest = config.tool_digest || [];

    // Initialize messages if provided
    (config.msgs || []).forEach(m => this.push(m));

    _log('created Context for tag', this.tag);
}
|
|
62
|
+
|
|
63
|
+
// Set the task reference (used when context is created separately)
|
|
64
|
+
setTask(task) {
|
|
65
|
+
this.task = task;
|
|
66
|
+
if (!this.tool_handler)
|
|
67
|
+
this.tool_handler = task?.tool_handler;
|
|
68
|
+
if (!this.functions)
|
|
69
|
+
this.functions = task?.functions;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
// Overridable: extending classes provide current state summary.
// The base class has no state of its own, so it contributes nothing.
getStateSummary() { return ''; }
|
|
74
|
+
|
|
75
|
+
// Recursively collect state summaries from child tasks that have no context
|
|
76
|
+
// (no msg Q), stopping at children that do have one.
|
|
77
|
+
_collectChildStateSummaries(task) {
|
|
78
|
+
if (!task.child || !task.child.size) return '';
|
|
79
|
+
const parts = [];
|
|
80
|
+
for (const child of task.child) {
|
|
81
|
+
if (child.context) continue; // has its own Q — boundary, stop here
|
|
82
|
+
if (typeof child.getStateSummary === 'function') {
|
|
83
|
+
const s = child.getStateSummary();
|
|
84
|
+
if (s) parts.push(s);
|
|
85
|
+
}
|
|
86
|
+
const nested = this._collectChildStateSummaries(child);
|
|
87
|
+
if (nested) parts.push(nested);
|
|
88
|
+
}
|
|
89
|
+
return parts.join('\n');
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// Internal (not overridable): own getStateSummary() + summaries from all
|
|
93
|
+
// contextless descendants, stopping at the first child that has its own Q.
|
|
94
|
+
_getStateSummary() {
|
|
95
|
+
const parts = [];
|
|
96
|
+
const own = this.getStateSummary();
|
|
97
|
+
if (own) parts.push(own);
|
|
98
|
+
if (this.task) {
|
|
99
|
+
const childSummaries = this._collectChildStateSummaries(this.task);
|
|
100
|
+
if (childSummaries) parts.push(childSummaries);
|
|
101
|
+
}
|
|
102
|
+
return parts.join('\n');
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
// Snapshot all public (non-underscore) task properties for dirty detection.
|
|
106
|
+
// Mirrors the observable proxy convention: _ prefix = internal, ignored.
|
|
107
|
+
// Does NOT call serialize() — that is for persistence, not dirty detection.
|
|
108
|
+
_snapshotPublicProps(obj, seen = new Set()) {
|
|
109
|
+
if (typeof obj !== 'object' || obj === null) return obj;
|
|
110
|
+
if (seen.has(obj)) return undefined; // circular ref — skip
|
|
111
|
+
seen.add(obj);
|
|
112
|
+
const out = Array.isArray(obj) ? [] : {};
|
|
113
|
+
for (const key of Object.keys(obj)) {
|
|
114
|
+
if (!key.startsWith('_') && typeof obj[key] !== 'function')
|
|
115
|
+
out[key] = this._snapshotPublicProps(obj[key], seen);
|
|
116
|
+
}
|
|
117
|
+
seen.delete(obj);
|
|
118
|
+
return out;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
// Append a tool result to the persistent tool digest
|
|
122
|
+
_appendToolDigest(toolName, resultContent) {
|
|
123
|
+
const truncated = typeof resultContent === 'string'
|
|
124
|
+
? resultContent.slice(0, 500)
|
|
125
|
+
: String(resultContent ?? '').slice(0, 500);
|
|
126
|
+
this.tool_digest.push({ tool: toolName, result: truncated, tm: Date.now() });
|
|
127
|
+
if (this.tool_digest.length > this.TOOL_DIGEST_LIMIT)
|
|
128
|
+
this.tool_digest = this.tool_digest.slice(-this.TOOL_DIGEST_LIMIT);
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// Get the parent context by traversing task hierarchy
|
|
132
|
+
getParentContext() {
|
|
133
|
+
if (!this.task || !this.task.parent)
|
|
134
|
+
return null;
|
|
135
|
+
return this.task.parent.findContext ? this.task.parent.findContext() : null;
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
// Get all ancestor contexts via task hierarchy
|
|
139
|
+
getAncestorContexts() {
|
|
140
|
+
if (!this.task)
|
|
141
|
+
return [];
|
|
142
|
+
return this.task.getAncestorContexts().filter(ctx => ctx !== this);
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
_hasPendingToolCalls() {
|
|
146
|
+
const toolCallMsgs = this._msgs.filter(m => m.msg.tool_calls);
|
|
147
|
+
|
|
148
|
+
for (const toolCallMsg of toolCallMsgs) {
|
|
149
|
+
const toolCalls = toolCallMsg.msg.tool_calls;
|
|
150
|
+
const toolCallIds = toolCalls.map(tc => tc.id);
|
|
151
|
+
|
|
152
|
+
const toolReplies = this._msgs.filter(m =>
|
|
153
|
+
m.msg.role === 'tool' &&
|
|
154
|
+
toolCallIds.includes(m.msg.tool_call_id)
|
|
155
|
+
);
|
|
156
|
+
|
|
157
|
+
const repliedCallIds = new Set(toolReplies.map(r => r.msg.tool_call_id));
|
|
158
|
+
const deferredCallIds = new Set(this._deferred_tool_calls.map(d => d.call.id));
|
|
159
|
+
const unRepliedCalls = toolCalls.filter(tc =>
|
|
160
|
+
!repliedCallIds.has(tc.id) && !deferredCallIds.has(tc.id)
|
|
161
|
+
);
|
|
162
|
+
|
|
163
|
+
if (unRepliedCalls.length > 0)
|
|
164
|
+
return true;
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
return false;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
_processWaitingQueue() {
|
|
171
|
+
_log('Processing waiting queue,', this._waitingQueue.length, 'messages waiting');
|
|
172
|
+
|
|
173
|
+
this._waitingQueue.forEach(waitingMessage => {
|
|
174
|
+
_log('Adding queued message to queue:', waitingMessage.role,
|
|
175
|
+
waitingMessage.content?.slice(0, 50));
|
|
176
|
+
this._createMsgObj(
|
|
177
|
+
waitingMessage.role,
|
|
178
|
+
waitingMessage.content,
|
|
179
|
+
waitingMessage.functions,
|
|
180
|
+
waitingMessage.opts
|
|
181
|
+
);
|
|
182
|
+
});
|
|
183
|
+
|
|
184
|
+
this._waitingQueue = [];
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
async _processSequentialQueue() {
|
|
188
|
+
if (this._processing_sequential || this._sequential_queue.length === 0)
|
|
189
|
+
return;
|
|
190
|
+
|
|
191
|
+
_ldbg('[' + this.tag + '] Starting sequential queue processing');
|
|
192
|
+
this._processing_sequential = true;
|
|
193
|
+
|
|
194
|
+
try {
|
|
195
|
+
while (this._sequential_queue.length > 0) {
|
|
196
|
+
const queuedMsg = this._sequential_queue.shift();
|
|
197
|
+
_ldbg('Processing sequential message:', queuedMsg.role,
|
|
198
|
+
queuedMsg.content?.slice(0, 50));
|
|
199
|
+
|
|
200
|
+
try {
|
|
201
|
+
const result = await this._sendMessageInternal(
|
|
202
|
+
queuedMsg.role,
|
|
203
|
+
queuedMsg.content,
|
|
204
|
+
queuedMsg.functions,
|
|
205
|
+
queuedMsg.opts
|
|
206
|
+
);
|
|
207
|
+
|
|
208
|
+
if (queuedMsg.resolve)
|
|
209
|
+
queuedMsg.resolve(result);
|
|
210
|
+
} catch (err) {
|
|
211
|
+
if (queuedMsg.reject)
|
|
212
|
+
queuedMsg.reject(err);
|
|
213
|
+
else
|
|
214
|
+
_lerr('Error processing queued message:', err);
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
_ldbg('[' + this.tag + '] Sequential queue processing completed');
|
|
218
|
+
} catch (err) {
|
|
219
|
+
_lerr('Error processing sequential queue:', err);
|
|
220
|
+
} finally {
|
|
221
|
+
_ldbg('[' + this.tag + '] Setting _processing_sequential = false');
|
|
222
|
+
this._processing_sequential = false;
|
|
223
|
+
}
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
_trackToolCall(toolName) {
|
|
227
|
+
this._tool_call_sequence.push(toolName);
|
|
228
|
+
|
|
229
|
+
if (this._tool_call_sequence.length > this.max_tool_repetition * 2) {
|
|
230
|
+
this._tool_call_sequence = this._tool_call_sequence.slice(-this.max_tool_repetition);
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
_shouldDropToolCall(toolName) {
|
|
235
|
+
if (this._tool_call_sequence.length < this.max_tool_repetition)
|
|
236
|
+
return false;
|
|
237
|
+
|
|
238
|
+
let consecutiveCount = 0;
|
|
239
|
+
for (let i = this._tool_call_sequence.length - 1; i >= 0; i--) {
|
|
240
|
+
if (this._tool_call_sequence[i] === toolName) {
|
|
241
|
+
consecutiveCount++;
|
|
242
|
+
} else {
|
|
243
|
+
break;
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
return consecutiveCount >= this.max_tool_repetition;
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
_resetToolSequenceIfDifferent(newToolNames) {
|
|
251
|
+
if (!newToolNames || newToolNames.length === 0) {
|
|
252
|
+
this._tool_call_sequence = [];
|
|
253
|
+
return;
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
const lastTool = this._tool_call_sequence[this._tool_call_sequence.length - 1];
|
|
257
|
+
if (!lastTool || !newToolNames.includes(lastTool)) {
|
|
258
|
+
this._tool_call_sequence = [];
|
|
259
|
+
}
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
_filterExcessiveToolCalls(toolCalls) {
|
|
263
|
+
if (!toolCalls || toolCalls.length === 0) return toolCalls;
|
|
264
|
+
|
|
265
|
+
return toolCalls.filter(call => {
|
|
266
|
+
const toolName = call.function.name;
|
|
267
|
+
if (this._shouldDropToolCall(toolName)) {
|
|
268
|
+
_log('Dropping excessive tool call:', toolName,
|
|
269
|
+
'(hit max_tool_repetition=' + this.max_tool_repetition + ')');
|
|
270
|
+
return false;
|
|
271
|
+
}
|
|
272
|
+
return true;
|
|
273
|
+
});
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// Execute tool calls that were deferred while another exchange was in
// flight. Calls are grouped per originating assistant message; within each
// group every call but the last is appended directly as a replied tool
// message, and the final one goes through sendMessage so the model sees the
// combined result. Task-state snapshots before/after each call feed the
// tool digest when the call mutated task state.
async _processDeferredToolCalls() {
    if (this._deferred_tool_calls.length === 0) return;

    _log('Processing deferred tool calls:', this._deferred_tool_calls.length);

    // Take ownership of the backlog up front so re-entrant deferrals queue fresh.
    const deferredCalls = [...this._deferred_tool_calls];
    this._deferred_tool_calls = [];

    // Group by the msgid of the assistant message that issued the calls.
    const callsByMessage = new Map();
    for (const deferred of deferredCalls) {
        const key = deferred.originalMessage.msgid;
        if (!callsByMessage.has(key)) {
            callsByMessage.set(key, []);
        }
        callsByMessage.get(key).push(deferred);
    }

    for (const [msgid, deferredGroup] of callsByMessage) {
        _log('Processing deferred group for message', msgid + ':', deferredGroup.length, 'calls');

        const toolCalls = deferredGroup.map(d => d.call);
        const toolNames = toolCalls.map(call => call.function.name);
        this._resetToolSequenceIfDifferent(toolNames);

        const filteredToolCalls = this._filterExcessiveToolCalls(toolCalls);

        let reply2 = {};
        for (const [i, call] of filteredToolCalls.entries()) {
            const toolName = call.function.name;
            this._trackToolCall(toolName);

            let result;

            if (this._isDuplicateToolCall(call)) {
                // An identical call (same name + args) is already running.
                _log('Duplicate deferred tool call detected:', call.function.name);
                result = {
                    content: `Duplicate call detected. An identical "${call.function.name}" ` +
                        `tool call with the same arguments is already running.`,
                    functions: null
                };
            } else {
                this._trackActiveToolCall(call);
                // Snapshot public task state so we can detect mutation below.
                const _snap = this.task
                    ? JSON.stringify(this._snapshotPublicProps(this.task)) : null;

                try {
                    // Per-call handler/timeout may be carried on the original message.
                    const correspondingDeferred = deferredGroup.find(d => d.call.id === call.id);
                    const handler = correspondingDeferred?.originalMessage.opts.handler || this.tool_handler;
                    const timeout = correspondingDeferred?.originalMessage.opts.timeout;

                    result = await this._executeToolCallWithTimeout(call, handler, timeout);
                    // Only calls that changed task state enter the tool digest.
                    if (_snap !== null &&
                        _snap !== JSON.stringify(this._snapshotPublicProps(this.task)))
                        this._appendToolDigest(call.function.name, result?.content || '');
                } finally {
                    this._completeActiveToolCall(call);
                }
            }

            const correspondingDeferred = deferredGroup.find(d => d.call.id === call.id);
            const opts = {
                name: call.function.name,
                tool_call_id: call.id,
                _recursive_depth: 1,
                model: correspondingDeferred?.originalMessage.opts.model
            };
            const content = result ? (result.content || result) : '';
            // Follow-up functions are only forwarded on the group's final call.
            const functions = (i === filteredToolCalls.length - 1 && result && result.functions)
                ? result.functions : null;

            if (i === filteredToolCalls.length - 1) {
                // Last call: send so the model can react to the batch.
                reply2 = await this.sendMessage('tool', content, functions, opts);
            } else {
                // Earlier calls: append as already-replied tool messages, placed
                // right after the assistant message that requested them.
                const toolResponse = this._createMsgObj('tool', content, null, opts);
                toolResponse.replied = 1;
                this._insertToolResponseAtCorrectPosition(toolResponse, call.id);
            }
        }
    }
}
|
|
356
|
+
|
|
357
|
+
// Bare chat messages in queue order (unwraps the {msg, opts, ...} entries).
get messages() {
    return this.__msgs;
}
|
|
360
|
+
|
|
361
|
+
set messages(value) {
|
|
362
|
+
if (Array.isArray(value)) {
|
|
363
|
+
this._msgs = value.map(m => m.msg ? m : {msg: m, opts: {}, replied: 1});
|
|
364
|
+
} else {
|
|
365
|
+
throw new Error("messages must be assigned an array");
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
push(msg) {
|
|
370
|
+
const m = {msg: msg.msg || msg, opts: msg.opts || {}, msgid: msg.msgid || 0, replied: msg.replied || 2};
|
|
371
|
+
return this._msgs.push(m);
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
pushSummary(summary) {
|
|
375
|
+
const idx = this.push({role: 'user', content: '[SUMMARY]: ' + summary});
|
|
376
|
+
this._msgs[idx - 1].opts.summary = true;
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
// JSON.stringify(context) serializes only the bare messages, not the wrappers.
toJSON() {
    return this.__msgs;
}
|
|
382
|
+
|
|
383
|
+
filter(callback) {
|
|
384
|
+
return this.__msgs.filter(callback);
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
concat(arr) {
|
|
388
|
+
return this.__msgs.concat(arr);
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
slice(start, end) {
|
|
392
|
+
return this.__msgs.slice(start, end);
|
|
393
|
+
}
|
|
394
|
+
|
|
395
|
+
// Reverse the queue in place (mutates _msgs); returns this for chaining.
reverse() {
    this._msgs.reverse();
    return this;
}
|
|
399
|
+
|
|
400
|
+
[Symbol.iterator]() {
|
|
401
|
+
return (function* () {
|
|
402
|
+
for (const item of this._msgs) {
|
|
403
|
+
yield item.msg;
|
|
404
|
+
}
|
|
405
|
+
}).call(this);
|
|
406
|
+
}
|
|
407
|
+
|
|
408
|
+
// Internal view: bare chat messages extracted from the queue entries.
get __msgs() { return this._msgs.map(m => m.msg); }
|
|
409
|
+
|
|
410
|
+
// Number of queue entries, array-like.
get length() {
    return this._msgs.length;
}
|
|
413
|
+
|
|
414
|
+
// Persistence form: the full wrapped entries (msg, opts, msgid, replied).
serialize() { return JSON.stringify(this._msgs); }
|
|
415
|
+
|
|
416
|
+
// Queue entries flagged as summaries (flag is set by pushSummary).
getSummaries() { return this._msgs.filter(m => m.opts.summary); }
|
|
417
|
+
|
|
418
|
+
// Get functions aggregated from this context and all ancestor contexts
|
|
419
|
+
getFunctions() {
|
|
420
|
+
const allFunctions = [];
|
|
421
|
+
|
|
422
|
+
// Get functions from ancestor contexts via task hierarchy
|
|
423
|
+
const ancestorContexts = this.getAncestorContexts();
|
|
424
|
+
for (const ctx of ancestorContexts) {
|
|
425
|
+
if (ctx.functions && Array.isArray(ctx.functions))
|
|
426
|
+
allFunctions.push(...ctx.functions);
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
// Add our own functions
|
|
430
|
+
if (this.functions && Array.isArray(this.functions))
|
|
431
|
+
allFunctions.push(...this.functions);
|
|
432
|
+
|
|
433
|
+
return allFunctions.length > 0 ? allFunctions : null;
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
async summarizeMessages() {
|
|
437
|
+
const tokens = util.countTokens(this.__msgs);
|
|
438
|
+
if (tokens < this.lower_limit)
|
|
439
|
+
return;
|
|
440
|
+
await this._summarizeContext();
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
// Shut this context down: wait for any in-flight sequential message, hand
// unprocessed queues to the parent context, then summarize the conversation
// into the parent (or in place when there is no parent).
async close() {
    _log('Closing Context tag', this.tag);

    // Poll until the current sequential message finishes;
    // _processSequentialQueue clears the flag in its finally block.
    if (this._sequential_mode && this._processing_sequential) {
        _ldbg('Sequential mode: waiting for current message to complete before closing tag', this.tag);
        let waitCount = 0;
        while (this._processing_sequential) {
            await new Promise(resolve => setTimeout(resolve, 100));
            waitCount++;
            if (waitCount % 10 === 0)
                _ldbg('Sequential mode: still waiting for tag', this.tag, 'after', waitCount, 'iterations');
        }
    }

    // Move waiting messages to parent context via task hierarchy
    const parentCtx = this.getParentContext();
    if (parentCtx && this._waitingQueue.length > 0) {
        _log('Moving', this._waitingQueue.length, 'waiting messages to parent context');
        parentCtx._waitingQueue.push(...this._waitingQueue);
        this._waitingQueue = [];
    }

    if (parentCtx && this._sequential_queue.length > 0) {
        _log('Moving', this._sequential_queue.length, 'sequential queue messages to parent context');
        parentCtx._sequential_queue.push(...this._sequential_queue);
        this._sequential_queue = [];
    }

    // close=true: the summary is pushed into parentCtx when one exists.
    await this._summarizeContext(true, parentCtx);
    _log('Finished closing Context tag', this.tag);
}
|
|
474
|
+
|
|
475
|
+
// Load chat history from store into message queue
|
|
476
|
+
async loadHistory(store) {
|
|
477
|
+
if (!store || !this.tag)
|
|
478
|
+
return;
|
|
479
|
+
const data = await store.load(this.tag);
|
|
480
|
+
if (!data)
|
|
481
|
+
return;
|
|
482
|
+
if (Array.isArray(data.tool_digest))
|
|
483
|
+
this.tool_digest = data.tool_digest;
|
|
484
|
+
if (!data.chat_history)
|
|
485
|
+
return;
|
|
486
|
+
const messages = await util.decompressMessages(data.chat_history);
|
|
487
|
+
if (!Array.isArray(messages) || messages.length === 0)
|
|
488
|
+
return;
|
|
489
|
+
// Find the index after the last system message to insert history
|
|
490
|
+
let insertIdx = 0;
|
|
491
|
+
for (let i = 0; i < this._msgs.length; i++) {
|
|
492
|
+
if (this._msgs[i].msg.role === 'system')
|
|
493
|
+
insertIdx = i + 1;
|
|
494
|
+
}
|
|
495
|
+
const historyMsgs = messages.map(m => ({
|
|
496
|
+
msg: m,
|
|
497
|
+
opts: {},
|
|
498
|
+
msgid: crypto.randomBytes(2).toString('hex'),
|
|
499
|
+
replied: 1
|
|
500
|
+
}));
|
|
501
|
+
this._msgs.splice(insertIdx, 0, ...historyMsgs);
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
// Remove tool-related messages tagged with a specific tag
|
|
505
|
+
cleanToolCallsByTag(tag) {
|
|
506
|
+
this._msgs = this._msgs.filter(m => {
|
|
507
|
+
if (m.opts.tag !== tag)
|
|
508
|
+
return true;
|
|
509
|
+
if (m.msg.tool_calls)
|
|
510
|
+
return false;
|
|
511
|
+
if (m.msg.role === 'tool')
|
|
512
|
+
return false;
|
|
513
|
+
return true;
|
|
514
|
+
});
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
async _summarizeContext(close, targetCtx) {
|
|
518
|
+
const keep = this._msgs.filter(m => !close && m.summary);
|
|
519
|
+
const summarize = this._msgs.filter(m => (!close || !m.summary) && m.replied);
|
|
520
|
+
const not_replied = this._msgs.filter(m => !m.replied);
|
|
521
|
+
_ldbg('Start summarize messages. # messages', summarize.length, '(total msgs:', this._msgs.length + ')');
|
|
522
|
+
|
|
523
|
+
if (!summarize.length) {
|
|
524
|
+
_ldbg('[' + this.tag + '] No messages to summarize');
|
|
525
|
+
return;
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
const msgs = (close ? [{role: 'system', content: this.prompt}] : []).concat(summarize.map(m => m.msg));
|
|
529
|
+
const summary = await this._summarizeMessages(msgs);
|
|
530
|
+
this._msgs = keep;
|
|
531
|
+
|
|
532
|
+
if (summary) {
|
|
533
|
+
if (close && targetCtx)
|
|
534
|
+
targetCtx.pushSummary(summary);
|
|
535
|
+
else
|
|
536
|
+
this.pushSummary(summary);
|
|
537
|
+
}
|
|
538
|
+
|
|
539
|
+
this._msgs.push(...not_replied);
|
|
540
|
+
_log('Summarized', this.tag, '(close', close + ') conversation to', util.countTokens(this.__msgs),
|
|
541
|
+
'tokens # messages', this._msgs.length);
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
async _summarizeMessages(msgs) {
|
|
545
|
+
let chunks = [msgs];
|
|
546
|
+
const tokens = util.countTokens(chunks[0]);
|
|
547
|
+
|
|
548
|
+
if (tokens > this.upper_limit) {
|
|
549
|
+
chunks = [];
|
|
550
|
+
let chunk_msgs = [];
|
|
551
|
+
let chunk = '';
|
|
552
|
+
|
|
553
|
+
msgs.forEach(m => {
|
|
554
|
+
if (typeof m !== 'object' || Array.isArray(m))
|
|
555
|
+
return _lerr('discarding msg with corrupt structure', m);
|
|
556
|
+
const keys = Object.keys(m);
|
|
557
|
+
for (const k of keys) {
|
|
558
|
+
if (!['role', 'content', 'refusal', 'name', 'tool', 'tool_calls'].includes(k))
|
|
559
|
+
return _lerr('discarding msg with corrupt key', k);
|
|
560
|
+
}
|
|
561
|
+
if (util.countTokens(m) > this.upper_limit)
|
|
562
|
+
return _lerr('discard abnormal size message', tokens, 'tokens\n' + m.content?.slice(0, 1500));
|
|
563
|
+
if (m.function)
|
|
564
|
+
m.content = '<function data>';
|
|
565
|
+
const str = `${m.role.toUpperCase()}: ${m.content || JSON.stringify(m.tool_calls)}`;
|
|
566
|
+
if (util.countTokens(chunk + str + '\n') < this.token_limit / 2) {
|
|
567
|
+
chunk += str + '\n';
|
|
568
|
+
chunk_msgs.push(m);
|
|
569
|
+
} else {
|
|
570
|
+
chunks.push(chunk_msgs);
|
|
571
|
+
chunk = '';
|
|
572
|
+
chunk_msgs = [];
|
|
573
|
+
}
|
|
574
|
+
});
|
|
575
|
+
if (chunk_msgs.length)
|
|
576
|
+
chunks.push(chunk_msgs);
|
|
577
|
+
}
|
|
578
|
+
|
|
579
|
+
if (!chunks.length)
|
|
580
|
+
return _log('No msgs for summary found');
|
|
581
|
+
|
|
582
|
+
_log('Summarizing messages. tokens', tokens, 'messages', msgs.length, 'using', chunks.length, 'chunks');
|
|
583
|
+
|
|
584
|
+
let reply = await openai.send([{role: 'system', content:
|
|
585
|
+
'Please summarize the following conversation. The summary should be one or two paragraphs as follows:' +
|
|
586
|
+
'- First paragraph: the purpose of the conversation and the outcome' +
|
|
587
|
+
'- Second paragraph (optional): next steps or pending requests that should be considered' +
|
|
588
|
+
'- Do not include system errors in the summary.\n' +
|
|
589
|
+
'- Formulate the summary from the AI agent\'s perspective\n' +
|
|
590
|
+
'\nConversation:\n' +
|
|
591
|
+
(chunks.length > 1 ? 'The conversation will be uploaded in ' + chunks.length +
|
|
592
|
+
' chunks. Wait for the last one then summarize all.\nChunk 1:\n'
|
|
593
|
+
: 'The conversation to summarize:\n') + JSON.stringify(chunks[0])}]);
|
|
594
|
+
|
|
595
|
+
let summary = reply.content;
|
|
596
|
+
for (let i = 1; i < chunks.length; i++) {
|
|
597
|
+
reply = await openai.send([{role: 'system', content:
|
|
598
|
+
'Chunk ' + (i === chunks.length ? 'last' : i) + ':\n' + JSON.stringify(chunks[i])}]);
|
|
599
|
+
summary = 'Summary of ' + this.tag + ' conversation:\n' + reply.content;
|
|
600
|
+
}
|
|
601
|
+
return summary;
|
|
602
|
+
}
|
|
603
|
+
|
|
604
|
+
// Get message context - walks up task hierarchy to collect prompts and summaries.
// Produces the transient prefix for an API call: each ancestor context's
// prompt plus its summary/system entries (ancestors first), then this
// context's own prompt and summaries.
// NOTE(review): when add_tag is set, this stamps a `tag` property onto the
// STORED msg objects (a lasting mutation, not a copy) — confirm intentional.
getMsgContext(add_tag) {
    const msgs = [];

    // Get context from ancestor tasks via task hierarchy
    const ancestorContexts = this.getAncestorContexts();
    for (const ctx of ancestorContexts) {
        if (ctx.prompt)
            msgs.push({role: 'system', content: ctx.prompt});
        // Add summaries from ancestor contexts
        const summaries = ctx._msgs.filter(m => m.opts.summary || m.msg.role === 'system').map(m => {
            if (add_tag)
                m.msg.tag = ctx.tag;
            return m.msg;
        });
        msgs.push(...summaries);
    }

    // Add this context's prompt
    if (this.prompt)
        msgs.push({role: 'system', content: this.prompt});

    // Add this context's summaries
    const mySummaries = this._msgs.filter(m => m.opts.summary || m.msg.role === 'system').map(m => {
        if (add_tag)
            m.msg.tag = this.tag;
        return m.msg;
    });

    return msgs.concat(mySummaries);
}
|
|
635
|
+
|
|
636
|
+
_createMsgObj(role, content, functions, opts) {
|
|
637
|
+
const name = opts?.name;
|
|
638
|
+
const tool_call_id = opts?.tool_call_id;
|
|
639
|
+
const msg = { role, content, ...(name && { name }), ...(tool_call_id && { tool_call_id }) };
|
|
640
|
+
const msgid = crypto.randomBytes(2).toString('hex');
|
|
641
|
+
const o = {msg, opts: opts || {}, functions, msgid, replied: 0};
|
|
642
|
+
this._msgs.forEach(m => m.opts.noreply ||= !m.replied);
|
|
643
|
+
this._msgs.push(o);
|
|
644
|
+
return o;
|
|
645
|
+
}
|
|
646
|
+
|
|
647
|
+
// Relocate a just-appended tool response so it sits directly after the
// assistant message whose tool_calls contain the matching id (the ordering
// the API requires). Prefers the original request message over a synthetic
// "Processing deferred tool calls" placeholder carrying the same call id.
_insertToolResponseAtCorrectPosition(toolResponseObj, tool_call_id) {
    let insertIndex = -1;          // after most recent match, any message
    let originalInsertIndex = -1;  // after a non-placeholder match

    // Scan backward so the most recent occurrence wins.
    for (let i = this._msgs.length - 1; i >= 0; i--) {
        const msg = this._msgs[i];
        if (msg.msg.tool_calls) {
            const hasMatchingCall = msg.msg.tool_calls.some(call => call.id === tool_call_id);
            if (hasMatchingCall) {
                if (insertIndex === -1)
                    insertIndex = i + 1;

                if (msg.msg.content !== 'Processing deferred tool calls') {
                    originalInsertIndex = i + 1;
                    break;
                }
            }
        }
    }

    const finalInsertIndex = originalInsertIndex !== -1 ? originalInsertIndex : insertIndex;

    // Only relocate when the response is currently the last entry; otherwise
    // it is already in place (or the queue changed underneath us).
    if (finalInsertIndex !== -1 && finalInsertIndex < this._msgs.length) {
        const lastIndex = this._msgs.length - 1;
        if (this._msgs[lastIndex] === toolResponseObj) {
            this._msgs.pop();
            this._msgs.splice(finalInsertIndex, 0, toolResponseObj);
        }
    }
}
|
|
677
|
+
|
|
678
|
+
_getToolCallKey(call) {
|
|
679
|
+
return `${call.function.name}:${call.function.arguments}`;
|
|
680
|
+
}
|
|
681
|
+
|
|
682
|
+
_isDuplicateToolCall(call) {
|
|
683
|
+
const key = this._getToolCallKey(call);
|
|
684
|
+
return this._active_tool_calls.has(key);
|
|
685
|
+
}
|
|
686
|
+
|
|
687
|
+
_trackActiveToolCall(call) {
|
|
688
|
+
const key = this._getToolCallKey(call);
|
|
689
|
+
this._active_tool_calls.set(key, {
|
|
690
|
+
call_id: call.id,
|
|
691
|
+
started_at: Date.now(),
|
|
692
|
+
function_name: call.function.name
|
|
693
|
+
});
|
|
694
|
+
_log('Tracking active tool call:', key);
|
|
695
|
+
}
|
|
696
|
+
|
|
697
|
+
_completeActiveToolCall(call) {
|
|
698
|
+
const key = this._getToolCallKey(call);
|
|
699
|
+
if (this._active_tool_calls.has(key)) {
|
|
700
|
+
this._active_tool_calls.delete(key);
|
|
701
|
+
_log('Completed active tool call:', key);
|
|
702
|
+
}
|
|
703
|
+
}
|
|
704
|
+
|
|
705
|
+
async _executeToolCallWithTimeout(call, handler, customTimeoutMs = null) {
|
|
706
|
+
const timeoutMs = customTimeoutMs || 5000;
|
|
707
|
+
|
|
708
|
+
return new Promise(async (resolve) => {
|
|
709
|
+
let timeoutId;
|
|
710
|
+
let completed = false;
|
|
711
|
+
|
|
712
|
+
timeoutId = setTimeout(() => {
|
|
713
|
+
if (!completed) {
|
|
714
|
+
completed = true;
|
|
715
|
+
_log('Tool call timed out after', timeoutMs + 'ms:', call.function.name);
|
|
716
|
+
resolve({
|
|
717
|
+
content: `Tool call "${call.function.name}" timed out after ${timeoutMs/1000} seconds.`,
|
|
718
|
+
functions: null
|
|
719
|
+
});
|
|
720
|
+
}
|
|
721
|
+
}, timeoutMs);
|
|
722
|
+
|
|
723
|
+
try {
|
|
724
|
+
const result = await this.interpretAndApplyChanges(call, handler);
|
|
725
|
+
|
|
726
|
+
if (!completed) {
|
|
727
|
+
completed = true;
|
|
728
|
+
clearTimeout(timeoutId);
|
|
729
|
+
resolve(result);
|
|
730
|
+
}
|
|
731
|
+
} catch (error) {
|
|
732
|
+
if (!completed) {
|
|
733
|
+
completed = true;
|
|
734
|
+
clearTimeout(timeoutId);
|
|
735
|
+
_lerr('Tool call failed with error:', call.function.name, error.message);
|
|
736
|
+
resolve({
|
|
737
|
+
content: `Tool call "${call.function.name}" failed with error: ${error.message}`,
|
|
738
|
+
functions: null
|
|
739
|
+
});
|
|
740
|
+
}
|
|
741
|
+
}
|
|
742
|
+
});
|
|
743
|
+
}
|
|
744
|
+
|
|
745
|
+
// Drop orphaned halves of tool-call pairs before sending to the API:
// tool responses whose request is missing are removed outright; tool calls
// that never got a response are stripped from their assistant message, and
// that message survives only if it still has non-empty content.
_validateToolResponses(msgs) {
    const toolCallIds = new Set();      // every call id requested
    const toolResponseIds = new Set();  // every call id answered

    // Pass 1: index all requested and answered call ids.
    for (const msg of msgs) {
        if (msg.tool_calls) {
            for (const toolCall of msg.tool_calls)
                toolCallIds.add(toolCall.id);
        }
        if (msg.role === 'tool' && msg.tool_call_id)
            toolResponseIds.add(msg.tool_call_id);
    }

    const validatedMsgs = [];
    const orphanedCalls = [];

    // Pass 2: rebuild the list, cross-checking each half against the other.
    for (const msg of msgs) {
        if (msg.role === 'tool' && msg.tool_call_id) {
            if (toolCallIds.has(msg.tool_call_id))
                validatedMsgs.push(msg);
            else
                _log('Removing orphaned tool response with tool_call_id:', msg.tool_call_id);
        } else if (msg.role === 'assistant' && msg.tool_calls) {
            const validToolCalls = [];
            for (const toolCall of msg.tool_calls) {
                if (toolResponseIds.has(toolCall.id))
                    validToolCalls.push(toolCall);
                else
                    orphanedCalls.push({
                        tool_call_id: toolCall.id,
                        function_name: toolCall.function?.name
                    });
            }

            if (validToolCalls.length > 0)
                validatedMsgs.push({...msg, tool_calls: validToolCalls});
            else if (msg.content && msg.content.trim() !== '') {
                // Every call was orphaned: keep the message as plain text.
                const cleanedMsg = {...msg};
                delete cleanedMsg.tool_calls;
                validatedMsgs.push(cleanedMsg);
            }
        } else {
            validatedMsgs.push(msg);
        }
    }

    if (orphanedCalls.length > 0)
        _lerr('Removed tool calls without responses:', JSON.stringify(orphanedCalls, null, 2));

    return validatedMsgs;
}
|
|
796
|
+
|
|
797
|
+
// Slice the last `limit` messages, walking back to avoid orphaning tool responses
|
|
798
|
+
// and expanding if fewer than MIN_CHAT_MESSAGES user/assistant messages are included.
|
|
799
|
+
_getQueueSlice(msgs, limit) {
|
|
800
|
+
if (msgs.length <= limit) return msgs;
|
|
801
|
+
|
|
802
|
+
let startIdx = msgs.length - limit;
|
|
803
|
+
|
|
804
|
+
// Walk backward if we'd start mid-tool-pair (tool response without its call)
|
|
805
|
+
while (startIdx > 0 && msgs[startIdx] && msgs[startIdx].role === 'tool')
|
|
806
|
+
startIdx--;
|
|
807
|
+
|
|
808
|
+
// Count user/assistant messages in the current window
|
|
809
|
+
let chatCount = 0;
|
|
810
|
+
for (let i = startIdx; i < msgs.length; i++) {
|
|
811
|
+
const role = msgs[i].role;
|
|
812
|
+
if (role === 'user' || role === 'assistant') chatCount++;
|
|
813
|
+
}
|
|
814
|
+
|
|
815
|
+
// Expand backward until we have at least MIN_CHAT_MESSAGES chat messages
|
|
816
|
+
while (chatCount < this.MIN_CHAT_MESSAGES && startIdx > 0) {
|
|
817
|
+
startIdx--;
|
|
818
|
+
const role = msgs[startIdx].role;
|
|
819
|
+
if (role === 'user' || role === 'assistant') chatCount++;
|
|
820
|
+
}
|
|
821
|
+
|
|
822
|
+
return msgs.slice(startIdx);
|
|
823
|
+
}
|
|
824
|
+
|
|
825
|
+
    // Build message queue — layered structure:
    // Layer 1: System prompts from ancestor hierarchy + own prompt (transient)
    // Layer 2: [State Summary] from getStateSummary() override (transient, if non-empty)
    // Layer 3: [Tool Activity Log] from tool_digest (transient, if non-empty)
    // Layer 4: Ancestor summaries only + last QUEUE_LIMIT of own messages (persistent)
    //
    // add_tag    — when truthy, stamps each queued message with the tag of the
    //              context it came from (used by the debug dump path).
    // tag_filter — when defined, own messages are restricted to summaries plus
    //              messages whose opts.tag matches the filter.
    // Returns the assembled queue after orphan-tool-pair cleanup.
    _createMsgQ(add_tag, tag_filter) {
        const fullQueue = [];
        const ancestorContexts = this.getAncestorContexts();

        // Layer 1+2: Each level's prompt followed immediately by its state summary
        for (const ctx of ancestorContexts) {
            if (ctx.prompt) {
                const prompt = {role: 'system', content: ctx.prompt};
                if (add_tag) prompt.tag = ctx.tag;
                fullQueue.push(prompt);
            }
            const ctxSummary = ctx._getStateSummary();
            if (ctxSummary)
                fullQueue.push({role: 'system', content: '[State Summary]\n' + ctxSummary});
        }
        // Own prompt + own state summary come last so they are closest to the
        // conversation tail.
        if (this.prompt) {
            const prompt = {role: 'system', content: this.prompt};
            if (add_tag) prompt.tag = this.tag;
            fullQueue.push(prompt);
        }
        const stateSummary = this._getStateSummary();
        if (stateSummary)
            fullQueue.push({role: 'system', content: '[State Summary]\n' + stateSummary});

        // Layer 3: Tool digest (if non-empty)
        if (this.tool_digest.length > 0) {
            const digestText = this.tool_digest.map(entry =>
                `[${new Date(entry.tm).toISOString()}] ${entry.tool}: ${entry.result}`
            ).join('\n');
            fullQueue.push({role: 'system', content: '[Tool Activity Log]\n' + digestText});
        }

        // Layer 4: Ancestor summaries only (no full ancestor messages)
        for (const ctx of ancestorContexts) {
            const summaries = ctx._msgs
                .filter(m => m.opts.summary)
                .map(m => add_tag ? Object.assign({}, m.msg, {tag: ctx.tag}) : m.msg);
            fullQueue.push(...summaries);
        }

        // Own messages — filter by tag if requested, then slice to QUEUE_LIMIT
        let my_msgs;
        if (tag_filter !== undefined) {
            // Summaries always survive the tag filter.
            my_msgs = this._msgs.filter(m => {
                if (m.opts.summary) return true;
                if (m.opts.tag === tag_filter) return true;
                return false;
            }).map(m => m.msg);
        } else {
            // NOTE(review): __msgs (double underscore) is defined outside this
            // chunk — presumably a plain-message view over _msgs; confirm it
            // exists before renaming anything here.
            my_msgs = this.__msgs;
        }

        if (add_tag)
            my_msgs = my_msgs.map(m => Object.assign({}, m, {tag: this.tag}));

        // Keep only the tail of the own-message history, tool-pair-safe.
        fullQueue.push(...this._getQueueSlice(my_msgs, this.QUEUE_LIMIT));

        // Final sweep: drop any tool call/response halves orphaned by slicing.
        return this._validateToolResponses(fullQueue);
    }
|
|
889
|
+
|
|
890
|
+
async sendMessage(role, content, functions, opts) {
|
|
891
|
+
if (!content)
|
|
892
|
+
return console.error('trying to send a message with no content');
|
|
893
|
+
|
|
894
|
+
const isRecursiveCall = opts?._recursive_depth !== undefined;
|
|
895
|
+
|
|
896
|
+
if (this._sequential_mode && this._processing_sequential && !isRecursiveCall) {
|
|
897
|
+
_log('Sequential mode: queueing message:', role, content?.slice(0, 50));
|
|
898
|
+
return new Promise((resolve, reject) => {
|
|
899
|
+
this._sequential_queue.push({ role, content, functions, opts, resolve, reject });
|
|
900
|
+
});
|
|
901
|
+
}
|
|
902
|
+
|
|
903
|
+
return await this._sendMessageInternal(role, content, functions, opts);
|
|
904
|
+
}
|
|
905
|
+
|
|
906
|
+
    // Core send path shared by direct and queued sends. Tracks recursion depth,
    // flips the sequential-processing flag for the duration of a top-level
    // send, and either parks the message (while tool calls are pending) or
    // hands it to _processSendMessage().
    //
    // opts._recursive_depth marks internally re-entrant sends triggered by
    // tool-call replies; those do not touch the depth counter or the flag.
    async _sendMessageInternal(role, content, functions, opts) {
        const isRecursiveCall = opts?._recursive_depth !== undefined;

        // Only top-level sends contribute to the depth counter; recursive
        // sends carry their own depth in opts._recursive_depth.
        if (!isRecursiveCall)
            this._current_depth++;

        const currentDepth = isRecursiveCall ? opts._recursive_depth : this._current_depth;

        // Snapshot the flag so the finally block restores exactly what we saw,
        // even under nested sends.
        const wasProcessing = this._processing_sequential;
        if (this._sequential_mode && !isRecursiveCall) {
            _ldbg('[' + this.tag + '] _sendMessageInternal setting _processing_sequential = true');
            this._processing_sequential = true;
        }

        try {
            // While tool calls are in flight, non-tool messages are parked on
            // the waiting queue and answered with a {queued: true} stub.
            if (this._hasPendingToolCalls() && role !== 'tool') {
                _log('Tool calls pending, queueing message:', role, content?.slice(0, 50));
                this._waitingQueue.push({ role, content, functions, opts });
                return { content: '', queued: true };
            }

            const o = this._createMsgObj(role, content, functions, opts);

            // A tool response must sit directly after its originating call.
            if (role === 'tool' && opts?.tool_call_id)
                this._insertToolResponseAtCorrectPosition(o, opts.tool_call_id);

            return await this._processSendMessage(o, currentDepth);
        } finally {
            if (!isRecursiveCall) {
                this._current_depth--;

                if (this._sequential_mode) {
                    _ldbg('[' + this.tag + '] restoring _processing_sequential to:', wasProcessing);
                    this._processing_sequential = wasProcessing;
                }

                // If this send started the sequential run, drain whatever
                // queued up behind it on the next tick.
                if (this._sequential_mode && !wasProcessing)
                    setImmediate(() => this._processSequentialQueue());
            }
        }
    }
|
|
947
|
+
|
|
948
|
+
_debugQDump(Q, functions) {
|
|
949
|
+
if (util.is_mocha && process.env.PROD)
|
|
950
|
+
return;
|
|
951
|
+
const dbgQ = Q || this._createMsgQ(true);
|
|
952
|
+
if (debug) {
|
|
953
|
+
console.log('MSGQDEBUG - Q:', JSON.stringify(dbgQ.map(m => ({
|
|
954
|
+
role: m.role,
|
|
955
|
+
content: m.content?.substring?.(0, 50),
|
|
956
|
+
tool_calls: m.tool_calls,
|
|
957
|
+
tool_call_id: m.tool_call_id,
|
|
958
|
+
tag: m.tag
|
|
959
|
+
})), 0, 4), functions?.map?.(f => f.name));
|
|
960
|
+
}
|
|
961
|
+
}
|
|
962
|
+
|
|
963
|
+
async _processSendMessage(o, depth) {
|
|
964
|
+
let Q;
|
|
965
|
+
try {
|
|
966
|
+
const name = o.opts?.name;
|
|
967
|
+
_log('@@@@@@@@@ [>>(' + depth + ') ' + o.msgid + (o.opts?.tag ? '-' + o.opts.tag : '') +
|
|
968
|
+
(name ? ' F(' + name + ')' : '') + ' ] SEND-AI', o.msg.role,
|
|
969
|
+
o.msg.content.slice(0, 2000) + (o.msg.content?.length > 2000 ? '... ' : ''));
|
|
970
|
+
|
|
971
|
+
if (this._waitingQueue.length > 0 && !this._hasPendingToolCalls()) {
|
|
972
|
+
_log('Processing waiting queue before OpenAI call:', this._waitingQueue.length, 'messages');
|
|
973
|
+
this._processWaitingQueue();
|
|
974
|
+
}
|
|
975
|
+
|
|
976
|
+
Q = this._createMsgQ(false, o.opts?.tag);
|
|
977
|
+
|
|
978
|
+
// Aggregate functions from hierarchy and merge with message-specific functions
|
|
979
|
+
const hierarchyFuncs = this.getFunctions() || [];
|
|
980
|
+
const messageFuncs = o.functions || [];
|
|
981
|
+
const funcs = [...hierarchyFuncs, ...messageFuncs].length > 0
|
|
982
|
+
? [...hierarchyFuncs, ...messageFuncs]
|
|
983
|
+
: null;
|
|
984
|
+
|
|
985
|
+
if (debug)
|
|
986
|
+
this._debugQDump(Q, funcs);
|
|
987
|
+
|
|
988
|
+
const reply = await openai.send(Q, funcs, o.opts?.model);
|
|
989
|
+
|
|
990
|
+
_log('@@@@@@@@@ [<<', o.msgid + (reply.tool_calls ? ' TC:' + (reply.tool_calls?.length || 0) : '') +
|
|
991
|
+
' ] REPLY-AI', reply.role,
|
|
992
|
+
(reply.content && !o.opts?.noreply ? ' Content: ' + reply.content.slice(0, 2000) : '') +
|
|
993
|
+
(reply.content?.length > 2000 ? '... ' : '') +
|
|
994
|
+
(reply.tool_calls && !o.opts?.nofunc ? '\nCall tools ' + JSON.stringify(reply.tool_calls, 0, 4) : ''));
|
|
995
|
+
|
|
996
|
+
o.replied = 1;
|
|
997
|
+
delete o.functions;
|
|
998
|
+
|
|
999
|
+
if (o.opts?.nofunc)
|
|
1000
|
+
delete reply.tool_calls;
|
|
1001
|
+
if (o.opts?.debug_empty && !reply.content)
|
|
1002
|
+
this._debugQDump(Q, o.functions);
|
|
1003
|
+
|
|
1004
|
+
this._msgs.push({msg: reply, msgid: o.msgid, opts: o.opts || {}, replied: 3});
|
|
1005
|
+
|
|
1006
|
+
let reply2 = {};
|
|
1007
|
+
if (reply?.tool_calls) {
|
|
1008
|
+
const toolNames = reply.tool_calls.map(call => call.function.name);
|
|
1009
|
+
this._resetToolSequenceIfDifferent(toolNames);
|
|
1010
|
+
|
|
1011
|
+
const filteredToolCalls = this._filterExcessiveToolCalls(reply.tool_calls);
|
|
1012
|
+
|
|
1013
|
+
let toolCallsToProcess = filteredToolCalls;
|
|
1014
|
+
let deferredToolCalls = [];
|
|
1015
|
+
|
|
1016
|
+
if (depth >= this.max_depth && filteredToolCalls.length > 0) {
|
|
1017
|
+
_log('Max depth', this.max_depth, 'reached at depth', depth, ', deferring',
|
|
1018
|
+
filteredToolCalls.length, 'tool calls');
|
|
1019
|
+
deferredToolCalls = filteredToolCalls;
|
|
1020
|
+
toolCallsToProcess = [];
|
|
1021
|
+
|
|
1022
|
+
this._deferred_tool_calls.push(...deferredToolCalls.map(call => ({
|
|
1023
|
+
call,
|
|
1024
|
+
originalMessage: o,
|
|
1025
|
+
depth: depth
|
|
1026
|
+
})));
|
|
1027
|
+
}
|
|
1028
|
+
|
|
1029
|
+
const toolCallsWithResults = [];
|
|
1030
|
+
for (const call of toolCallsToProcess) {
|
|
1031
|
+
const toolName = call.function.name;
|
|
1032
|
+
this._trackToolCall(toolName);
|
|
1033
|
+
|
|
1034
|
+
if (this._isDuplicateToolCall(call)) {
|
|
1035
|
+
_log('Duplicate tool call detected:', call.function.name);
|
|
1036
|
+
const result = {
|
|
1037
|
+
content: `Duplicate call detected. An identical "${call.function.name}" ` +
|
|
1038
|
+
`tool call with the same arguments is already running.`,
|
|
1039
|
+
functions: null
|
|
1040
|
+
};
|
|
1041
|
+
toolCallsWithResults.push({ call, result, isDuplicate: true });
|
|
1042
|
+
} else {
|
|
1043
|
+
this._trackActiveToolCall(call);
|
|
1044
|
+
toolCallsWithResults.push({ call, result: null, isDuplicate: false });
|
|
1045
|
+
}
|
|
1046
|
+
}
|
|
1047
|
+
|
|
1048
|
+
for (const { call, isDuplicate } of toolCallsWithResults) {
|
|
1049
|
+
if (!isDuplicate) {
|
|
1050
|
+
const _snap = this.task
|
|
1051
|
+
? JSON.stringify(this._snapshotPublicProps(this.task)) : null;
|
|
1052
|
+
try {
|
|
1053
|
+
const result = await this._executeToolCallWithTimeout(
|
|
1054
|
+
call, o.opts?.handler, o.opts?.timeout);
|
|
1055
|
+
const item = toolCallsWithResults.find(item => item.call.id === call.id);
|
|
1056
|
+
if (item) item.result = result;
|
|
1057
|
+
if (_snap !== null &&
|
|
1058
|
+
_snap !== JSON.stringify(this._snapshotPublicProps(this.task)))
|
|
1059
|
+
this._appendToolDigest(call.function.name, result?.content || '');
|
|
1060
|
+
} finally {
|
|
1061
|
+
this._completeActiveToolCall(call);
|
|
1062
|
+
}
|
|
1063
|
+
}
|
|
1064
|
+
}
|
|
1065
|
+
|
|
1066
|
+
for (const [i, { call, result }] of toolCallsWithResults.entries()) {
|
|
1067
|
+
const opts = {
|
|
1068
|
+
name: call.function.name,
|
|
1069
|
+
tool_call_id: call.id,
|
|
1070
|
+
_recursive_depth: depth + 1,
|
|
1071
|
+
model: o.opts?.model
|
|
1072
|
+
};
|
|
1073
|
+
const content = result ? (result.content || result) : '';
|
|
1074
|
+
const functions = (i === toolCallsWithResults.length - 1 && result && result.functions)
|
|
1075
|
+
? result.functions : null;
|
|
1076
|
+
|
|
1077
|
+
if (i === toolCallsWithResults.length - 1)
|
|
1078
|
+
reply2 = await this.sendMessage('tool', content, functions, opts);
|
|
1079
|
+
else {
|
|
1080
|
+
const toolResponse = this._createMsgObj('tool', content, null, opts);
|
|
1081
|
+
toolResponse.replied = 1;
|
|
1082
|
+
this._insertToolResponseAtCorrectPosition(toolResponse, call.id);
|
|
1083
|
+
}
|
|
1084
|
+
}
|
|
1085
|
+
}
|
|
1086
|
+
|
|
1087
|
+
reply.content ||= '';
|
|
1088
|
+
reply.content += reply2?.content ? '\n' + reply2.content : '';
|
|
1089
|
+
|
|
1090
|
+
const hasPending = this._hasPendingToolCalls();
|
|
1091
|
+
const queueLength = this._waitingQueue.length;
|
|
1092
|
+
|
|
1093
|
+
if (!hasPending && queueLength > 0) {
|
|
1094
|
+
_log('No more pending tool calls, processing', queueLength, 'waiting messages');
|
|
1095
|
+
this._processWaitingQueue();
|
|
1096
|
+
}
|
|
1097
|
+
|
|
1098
|
+
const isRecursiveCall = o.opts?._recursive_depth !== undefined;
|
|
1099
|
+
if (!isRecursiveCall && this._current_depth === 1 && !this._hasPendingToolCalls()
|
|
1100
|
+
&& this._waitingQueue.length === 0 && this._deferred_tool_calls.length > 0) {
|
|
1101
|
+
_log('Processing', this._deferred_tool_calls.length, 'deferred tool calls');
|
|
1102
|
+
await this._processDeferredToolCalls();
|
|
1103
|
+
}
|
|
1104
|
+
|
|
1105
|
+
return reply;
|
|
1106
|
+
} catch (err) {
|
|
1107
|
+
console.error('sendMessage error:', err);
|
|
1108
|
+
this._debugQDump(Q, o?.functions);
|
|
1109
|
+
throw err;
|
|
1110
|
+
}
|
|
1111
|
+
}
|
|
1112
|
+
|
|
1113
|
+
async interpretAndApplyChanges(call, handler) {
|
|
1114
|
+
_log('apply tool', call.function.name, 'have handler', !!handler, !!this.tool_handler);
|
|
1115
|
+
if (!call)
|
|
1116
|
+
return { content: '', functions: null };
|
|
1117
|
+
|
|
1118
|
+
_log('invoking function', call.function.name);
|
|
1119
|
+
handler ||= this.tool_handler;
|
|
1120
|
+
let result = await handler(call.function.name, call.function.arguments);
|
|
1121
|
+
|
|
1122
|
+
let content = result?.content || result || '';
|
|
1123
|
+
let functions = result?.functions || null;
|
|
1124
|
+
|
|
1125
|
+
if (content && typeof content !== 'string')
|
|
1126
|
+
content = JSON.stringify(content);
|
|
1127
|
+
else if (!content)
|
|
1128
|
+
{
|
|
1129
|
+
content = `tool call ${call.function.name} ${call.id} completed. do not reply. wait for the next msg `
|
|
1130
|
+
+`from the user`;
|
|
1131
|
+
}
|
|
1132
|
+
|
|
1133
|
+
_log('FUNCTION RESULT', call.function.name, call.id, content.substring(0, 50) + '...',
|
|
1134
|
+
functions ? 'with functions' : 'no functions');
|
|
1135
|
+
return { content, functions };
|
|
1136
|
+
}
|
|
1137
|
+
|
|
1138
|
+
    // Spawn child context (creates a child task with its own context)
    //
    // prompt — system prompt for the child context.
    // tag    — also used as the child task's name (falls back to 'child-context').
    // config — forwarded as the child's context config.
    // Returns the child Context.
    spawnChild(prompt, tag, config = {}) {
        if (!this.task) {
            // If no task, create a standalone context (legacy mode)
            return createContext(prompt, null, { ...config, tag });
        }

        // Create a child task with its own context
        const Itask = require('./itask.js');
        const childTask = new Itask({
            name: tag || 'child-context',
            prompt,
            async: true,
            spawn_parent: this.task,
            contextConfig: config
        }, []);

        // NOTE(review): the legacy branch above returns a Proxy-wrapped context
        // via createContext(), but this branch returns a raw Context — callers
        // get no array-like index access here. Confirm whether skipping the
        // Proxy is intentional for task-owned contexts.
        const childContext = new Context(prompt, childTask, { ...config, tag });
        childTask.setContext(childContext);

        return childContext;
    }
|
|
1160
|
+
}
|
|
1161
|
+
|
|
1162
|
+
// Factory function to create a Context with Proxy wrapper
//
// The Proxy makes the context array-like over its message list: numeric
// properties index into _msgs, `length` reflects _msgs.length, and property
// names that exist as functions on _msgs are forwarded (see note below).
function createContext(prompt, task, config = {}) {
    const instance = new Context(prompt, task, config);

    return new Proxy(instance, {
        get(target, prop, receiver) {
            // Numeric index → the raw message at that position.
            // NOTE(review): isNaN('') is false, so an empty-string prop would
            // be treated as index 0 here — confirm that is never hit.
            if (typeof prop === 'string' && !isNaN(prop)) {
                return target._msgs[Number(prop)]?.msg;
            }

            // NOTE(review): when prop names an Array method on _msgs (e.g.
            // 'push', 'map'), this binds target[prop] — the Context's own
            // method of that name — not the array's. That only works if
            // Context defines a matching method (declared earlier in this
            // file, outside this chunk); otherwise .bind throws on undefined.
            // Verify Context mirrors the Array API before relying on this.
            if (typeof target._msgs[prop] === 'function') {
                return target[prop].bind(target);
            }

            if (prop === 'length') {
                return target._msgs.length;
            }

            return Reflect.get(target, prop, receiver);
        },

        set(target, prop, value, receiver) {
            // Numeric assignment replaces the entry with a bare {msg} record
            // (no opts/msgid — unlike entries created by sendMessage).
            if (typeof prop === 'string' && !isNaN(prop)) {
                target._msgs[Number(prop)] = {msg: value};
                return true;
            }

            return Reflect.set(target, prop, value, receiver);
        },

        has(target, prop) {
            if (typeof prop === 'string' && !isNaN(prop)) return true;
            if (prop in target._msgs) return true;
            return prop in target;
        },

        ownKeys(target) {
            // Merge message indices with the instance's own keys.
            const keys = Reflect.ownKeys(target);
            const msgKeys = Object.keys(target._msgs);
            return [...new Set([...msgKeys, ...keys])];
        },

        getOwnPropertyDescriptor(target, prop) {
            if (typeof prop === 'string' && !isNaN(prop)) {
                return Object.getOwnPropertyDescriptor(target._msgs, prop);
            }
            return Object.getOwnPropertyDescriptor(target, prop);
        }
    });
}
|
|
1212
|
+
|
|
1213
|
+
// Public API: the Context class plus the createContext() factory that wraps
// instances in the array-like Proxy above.
module.exports = { Context, createContext };
|