@ouro.bot/cli 0.0.1-alpha.0 → 0.1.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AdoptionSpecialist.ouro/agent.json +20 -0
- package/AdoptionSpecialist.ouro/psyche/SOUL.md +22 -0
- package/AdoptionSpecialist.ouro/psyche/identities/basilisk.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/jafar.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/jormungandr.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/kaa.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/medusa.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/monty.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/nagini.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/ouroboros.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/python.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/quetzalcoatl.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/sir-hiss.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/the-serpent.md +31 -0
- package/AdoptionSpecialist.ouro/psyche/identities/the-snake.md +31 -0
- package/README.md +224 -6
- package/dist/heart/agent-entry.js +17 -0
- package/dist/heart/api-error.js +34 -0
- package/dist/heart/config.js +296 -0
- package/dist/heart/core.js +485 -0
- package/dist/heart/daemon/daemon-cli.js +626 -0
- package/dist/heart/daemon/daemon-entry.js +74 -0
- package/dist/heart/daemon/daemon.js +310 -0
- package/dist/heart/daemon/hatch-flow.js +284 -0
- package/dist/heart/daemon/hatch-specialist.js +107 -0
- package/dist/heart/daemon/health-monitor.js +79 -0
- package/dist/heart/daemon/message-router.js +98 -0
- package/dist/heart/daemon/ouro-bot-entry.js +23 -0
- package/dist/heart/daemon/ouro-bot-wrapper.js +90 -0
- package/dist/heart/daemon/ouro-entry.js +23 -0
- package/dist/heart/daemon/ouro-uti.js +212 -0
- package/dist/heart/daemon/process-manager.js +220 -0
- package/dist/heart/daemon/runtime-logging.js +98 -0
- package/dist/heart/daemon/subagent-installer.js +125 -0
- package/dist/heart/daemon/task-scheduler.js +237 -0
- package/dist/heart/harness.js +26 -0
- package/dist/heart/identity.js +270 -0
- package/dist/heart/kicks.js +144 -0
- package/dist/heart/primitives.js +4 -0
- package/dist/heart/providers/anthropic.js +329 -0
- package/dist/heart/providers/azure.js +66 -0
- package/dist/heart/providers/minimax.js +53 -0
- package/dist/heart/providers/openai-codex.js +162 -0
- package/dist/heart/streaming.js +412 -0
- package/dist/heart/turn-coordinator.js +62 -0
- package/dist/inner-worker-entry.js +4 -0
- package/dist/mind/associative-recall.js +176 -0
- package/dist/mind/bundle-manifest.js +118 -0
- package/dist/mind/context.js +218 -0
- package/dist/mind/first-impressions.js +43 -0
- package/dist/mind/format.js +56 -0
- package/dist/mind/friends/channel.js +41 -0
- package/dist/mind/friends/resolver.js +84 -0
- package/dist/mind/friends/store-file.js +171 -0
- package/dist/mind/friends/store.js +4 -0
- package/dist/mind/friends/tokens.js +26 -0
- package/dist/mind/friends/types.js +21 -0
- package/dist/mind/memory.js +326 -0
- package/dist/mind/phrases.js +43 -0
- package/dist/mind/prompt.js +254 -0
- package/dist/mind/token-estimate.js +119 -0
- package/dist/nerves/cli-logging.js +31 -0
- package/dist/nerves/coverage/audit-rules.js +81 -0
- package/dist/nerves/coverage/audit.js +200 -0
- package/dist/nerves/coverage/cli-main.js +5 -0
- package/dist/nerves/coverage/cli.js +51 -0
- package/dist/nerves/coverage/contract.js +23 -0
- package/dist/nerves/coverage/file-completeness.js +46 -0
- package/dist/nerves/coverage/run-artifacts.js +77 -0
- package/dist/nerves/coverage/source-scanner.js +34 -0
- package/dist/nerves/index.js +152 -0
- package/dist/nerves/runtime.js +38 -0
- package/dist/repertoire/ado-client.js +211 -0
- package/dist/repertoire/ado-context.js +73 -0
- package/dist/repertoire/ado-semantic.js +841 -0
- package/dist/repertoire/ado-templates.js +146 -0
- package/dist/repertoire/coding/index.js +36 -0
- package/dist/repertoire/coding/manager.js +489 -0
- package/dist/repertoire/coding/monitor.js +60 -0
- package/dist/repertoire/coding/reporter.js +45 -0
- package/dist/repertoire/coding/spawner.js +102 -0
- package/dist/repertoire/coding/tools.js +167 -0
- package/dist/repertoire/coding/types.js +2 -0
- package/dist/repertoire/data/ado-endpoints.json +122 -0
- package/dist/repertoire/data/graph-endpoints.json +212 -0
- package/dist/repertoire/github-client.js +64 -0
- package/dist/repertoire/graph-client.js +118 -0
- package/dist/repertoire/skills.js +156 -0
- package/dist/repertoire/tasks/board.js +122 -0
- package/dist/repertoire/tasks/index.js +210 -0
- package/dist/repertoire/tasks/lifecycle.js +80 -0
- package/dist/repertoire/tasks/middleware.js +65 -0
- package/dist/repertoire/tasks/parser.js +173 -0
- package/dist/repertoire/tasks/scanner.js +132 -0
- package/dist/repertoire/tasks/transitions.js +145 -0
- package/dist/repertoire/tasks/types.js +2 -0
- package/dist/repertoire/tools-base.js +622 -0
- package/dist/repertoire/tools-github.js +53 -0
- package/dist/repertoire/tools-teams.js +308 -0
- package/dist/repertoire/tools.js +199 -0
- package/dist/senses/cli-entry.js +15 -0
- package/dist/senses/cli.js +523 -0
- package/dist/senses/commands.js +98 -0
- package/dist/senses/inner-dialog-worker.js +61 -0
- package/dist/senses/inner-dialog.js +216 -0
- package/dist/senses/teams-entry.js +15 -0
- package/dist/senses/teams.js +695 -0
- package/dist/senses/trust-gate.js +150 -0
- package/package.json +34 -11
- package/subagents/README.md +71 -0
- package/subagents/work-doer.md +233 -0
- package/subagents/work-merger.md +593 -0
- package/subagents/work-planner.md +373 -0
- package/bin/ouro.js +0 -6
|
@@ -0,0 +1,412 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.FinalAnswerParser = void 0;
|
|
4
|
+
exports.toResponsesInput = toResponsesInput;
|
|
5
|
+
exports.toResponsesTools = toResponsesTools;
|
|
6
|
+
exports.streamChatCompletion = streamChatCompletion;
|
|
7
|
+
exports.streamResponsesApi = streamResponsesApi;
|
|
8
|
+
const runtime_1 = require("../nerves/runtime");
|
|
9
|
+
// Character-level state machine that extracts the answer value from
|
|
10
|
+
// `final_answer` tool call JSON arguments as they stream in.
|
|
11
|
+
// Scans for prefix `"answer":"` or `"answer": "` in the character stream,
|
|
12
|
+
// then emits text handling JSON escapes, stopping at unescaped closing `"`.
|
|
13
|
+
// Character-level state machine that extracts the answer value from
// `final_answer` tool call JSON arguments as they stream in.
// Scans for prefix `"answer":"` or `"answer": "` in the character stream,
// then emits text handling JSON escapes, stopping at unescaped closing `"`.
class FinalAnswerParser {
    // Possible prefixes to match (with and without space after colon)
    static PREFIXES = ['"answer":"', '"answer": "'];
    // Longest prefix length. Only the trailing characters of the scan buffer
    // can ever participate in an endsWith() match, so the buffer never needs
    // to hold more than this many characters.
    static MAX_PREFIX_LENGTH = Math.max(...FinalAnswerParser.PREFIXES.map((p) => p.length));
    // Buffer of characters seen so far (pre-activation only)
    buf = "";
    _active = false;
    _complete = false;
    inEscape = false;
    /** True once an answer prefix has been matched and text is streaming. */
    get active() { return this._active; }
    /** True once the unescaped closing quote of the answer string was seen. */
    get complete() { return this._complete; }
    /**
     * Consume one raw argument delta and return any newly decoded answer text.
     * Returns "" before activation and always after completion.
     * @param {string} delta - raw JSON-argument characters as streamed.
     * @returns {string} decoded answer text contained in this delta.
     */
    process(delta) {
        if (this._complete)
            return "";
        let out = "";
        for (let i = 0; i < delta.length; i++) {
            const ch = delta[i];
            if (!this._active) {
                this.buf += ch;
                // Bound the scan buffer: without this, arguments whose answer
                // key appears late (or never) accumulate unboundedly.
                if (this.buf.length > FinalAnswerParser.MAX_PREFIX_LENGTH) {
                    this.buf = this.buf.slice(-FinalAnswerParser.MAX_PREFIX_LENGTH);
                }
                // Check if any prefix has been fully matched in the buffer
                for (const prefix of FinalAnswerParser.PREFIXES) {
                    if (this.buf.endsWith(prefix)) {
                        this._active = true;
                        break;
                    }
                }
            }
            else {
                // Active: emit characters, handling JSON escapes
                if (this.inEscape) {
                    this.inEscape = false;
                    switch (ch) {
                        case '"':
                            out += '"';
                            break;
                        case '\\':
                            out += '\\';
                            break;
                        case 'n':
                            out += '\n';
                            break;
                        case 't':
                            out += '\t';
                            break;
                        case '/':
                            out += '/';
                            break;
                        default:
                            out += ch;
                            break; // unknown escape: pass through character
                    }
                }
                else if (ch === '\\') {
                    this.inEscape = true;
                }
                else if (ch === '"') {
                    this._complete = true;
                    return out; // stop processing, closing quote found
                }
                else {
                    out += ch;
                }
            }
        }
        return out;
    }
}
|
|
79
|
+
exports.FinalAnswerParser = FinalAnswerParser;
|
|
80
|
+
/**
 * Convert Chat Completions-style messages into Responses API shape:
 * the first system message becomes `instructions`, everything else is
 * flattened into the `input` item list (reasoning items restored before
 * assistant content, tool calls/results as function_call items).
 */
function toResponsesInput(messages) {
    let instructions = "";
    const input = [];
    const asText = (value) => (typeof value === "string" ? value : "");
    for (const msg of messages) {
        switch (msg.role) {
            case "system": {
                // Only the first system message is honored as instructions.
                if (!instructions)
                    instructions = asText(msg.content) || "";
                break;
            }
            case "user": {
                input.push({ role: "user", content: asText(msg.content) });
                break;
            }
            case "assistant": {
                // Restore reasoning items before content (matching API item order)
                if (msg._reasoning_items) {
                    for (const reasoningItem of msg._reasoning_items)
                        input.push(reasoningItem);
                }
                if (msg.content)
                    input.push({ role: "assistant", content: asText(msg.content) });
                if (msg.tool_calls) {
                    for (const call of msg.tool_calls) {
                        input.push({
                            type: "function_call",
                            call_id: call.id,
                            name: call.function.name,
                            arguments: call.function.arguments,
                            status: "completed",
                        });
                    }
                }
                break;
            }
            case "tool": {
                input.push({
                    type: "function_call_output",
                    call_id: msg.tool_call_id,
                    output: asText(msg.content),
                });
                break;
            }
        }
    }
    return { instructions, input };
}
|
|
132
|
+
/**
 * Convert Chat Completions tool definitions into Responses API tool shape
 * (flattened: name/description/parameters at the top level, strict off).
 */
function toResponsesTools(ccTools) {
    const converted = [];
    for (const tool of ccTools) {
        const fn = tool.function;
        converted.push({
            type: "function",
            name: fn.name,
            description: fn.description ?? null,
            parameters: fn.parameters ?? null,
            strict: false,
        });
    }
    return converted;
}
|
|
141
|
+
/**
 * Stream a Chat Completions request, forwarding deltas to `callbacks`.
 *
 * Side effects:
 * - Mutates `createParams` in place, forcing `stream_options.include_usage`.
 * - Invokes callbacks: `onModelStreamStart` (once, on first delta),
 *   `onTextChunk`, `onReasoningChunk`, and optional `onClearText` when a
 *   sole `final_answer` tool call is detected.
 *
 * Inline `<think>…</think>` spans in content are routed to
 * `onReasoningChunk` instead of `onTextChunk` (MiniMax-style reasoning).
 *
 * Returns `{ content, toolCalls, outputItems: [], usage, finalAnswerStreamed }`
 * where `toolCalls` is the accumulated array of tool-call deltas and
 * `finalAnswerStreamed` reflects whether the answer parser activated.
 */
async function streamChatCompletion(client, createParams, callbacks, signal) {
    (0, runtime_1.emitNervesEvent)({
        component: "engine",
        event: "engine.stream_start",
        message: "chat completion stream start",
        meta: {},
    });
    // Request usage data in the final streaming chunk
    createParams.stream_options = { include_usage: true };
    const response = await client.chat.completions.create(createParams, signal ? { signal } : {});
    let content = "";
    // Keyed by tool-call index so argument deltas append to the right call.
    let toolCalls = {};
    let streamStarted = false;
    let usage;
    const answerParser = new FinalAnswerParser();
    let finalAnswerDetected = false;
    // State machine for parsing inline <think> tags (MiniMax pattern)
    let contentBuf = "";
    let inThinkTag = false;
    const OPEN_TAG = "<think>";
    const CLOSE_TAG = "</think>";
    // Drain contentBuf, splitting text vs. reasoning at <think> boundaries.
    // When `flush` is false, a trailing partial tag is retained in the buffer
    // so a tag split across deltas is not emitted as literal text.
    function processContentBuf(flush) {
        while (contentBuf.length > 0) {
            if (inThinkTag) {
                const end = contentBuf.indexOf(CLOSE_TAG);
                if (end !== -1) {
                    const reasoning = contentBuf.slice(0, end);
                    if (reasoning)
                        callbacks.onReasoningChunk(reasoning);
                    contentBuf = contentBuf.slice(end + CLOSE_TAG.length);
                    inThinkTag = false;
                }
                else {
                    // Check if buffer ends with a partial </think> prefix
                    if (!flush) {
                        let retain = 0;
                        for (let i = 1; i < CLOSE_TAG.length && i <= contentBuf.length; i++) {
                            if (contentBuf.endsWith(CLOSE_TAG.slice(0, i)))
                                retain = i;
                        }
                        if (retain > 0) {
                            const reasoning = contentBuf.slice(0, -retain);
                            if (reasoning)
                                callbacks.onReasoningChunk(reasoning);
                            contentBuf = contentBuf.slice(-retain);
                            return;
                        }
                    }
                    // All reasoning, flush it
                    callbacks.onReasoningChunk(contentBuf);
                    contentBuf = "";
                }
            }
            else {
                const start = contentBuf.indexOf(OPEN_TAG);
                if (start !== -1) {
                    const text = contentBuf.slice(0, start);
                    if (text)
                        callbacks.onTextChunk(text);
                    contentBuf = contentBuf.slice(start + OPEN_TAG.length);
                    inThinkTag = true;
                }
                else {
                    // Check if buffer ends with a partial <think> prefix
                    if (!flush) {
                        let retain = 0;
                        for (let i = 1; i < OPEN_TAG.length && i <= contentBuf.length; i++) {
                            if (contentBuf.endsWith(OPEN_TAG.slice(0, i)))
                                retain = i;
                        }
                        if (retain > 0) {
                            const text = contentBuf.slice(0, -retain);
                            if (text)
                                callbacks.onTextChunk(text);
                            contentBuf = contentBuf.slice(-retain);
                            return;
                        }
                    }
                    // All content, flush it
                    callbacks.onTextChunk(contentBuf);
                    contentBuf = "";
                }
            }
        }
    }
    for await (const chunk of response) {
        if (signal?.aborted)
            break;
        // Capture usage from final chunk (sent when stream_options.include_usage is true)
        if (chunk.usage) {
            const u = chunk.usage;
            usage = {
                input_tokens: u.prompt_tokens,
                output_tokens: u.completion_tokens,
                reasoning_tokens: u.completion_tokens_details?.reasoning_tokens ?? 0,
                total_tokens: u.total_tokens,
            };
        }
        const d = chunk.choices[0]?.delta;
        if (!d)
            continue;
        // Handle reasoning_content (Azure AI models like DeepSeek-R1)
        if (d.reasoning_content) {
            if (!streamStarted) {
                callbacks.onModelStreamStart();
                streamStarted = true;
            }
            callbacks.onReasoningChunk(d.reasoning_content);
        }
        if (d.content) {
            if (!streamStarted) {
                callbacks.onModelStreamStart();
                streamStarted = true;
            }
            content += d.content;
            contentBuf += d.content;
            processContentBuf(false);
        }
        if (d.tool_calls) {
            for (const tc of d.tool_calls) {
                if (!toolCalls[tc.index])
                    toolCalls[tc.index] = {
                        id: tc.id ?? "",
                        name: tc.function?.name ?? "",
                        arguments: "",
                    };
                if (tc.id)
                    toolCalls[tc.index].id = tc.id;
                if (tc.function?.name) {
                    toolCalls[tc.index].name = tc.function.name;
                    // Detect final_answer tool call on first name delta.
                    // Only activate streaming if this is the sole tool call (index 0
                    // and no other indices seen). Mixed calls are rejected by core.ts.
                    if (tc.function.name === "final_answer" && !finalAnswerDetected
                        && tc.index === 0 && Object.keys(toolCalls).length === 1) {
                        finalAnswerDetected = true;
                        callbacks.onClearText?.();
                    }
                }
                if (tc.function?.arguments) {
                    toolCalls[tc.index].arguments += tc.function.arguments;
                    // Feed final_answer argument deltas to the parser for progressive
                    // streaming, but only when it appears to be the sole tool call.
                    if (finalAnswerDetected && toolCalls[tc.index].name === "final_answer"
                        && Object.keys(toolCalls).length === 1) {
                        const text = answerParser.process(tc.function.arguments);
                        if (text)
                            callbacks.onTextChunk(text);
                    }
                }
            }
        }
    }
    // Flush any remaining buffer at end of stream
    if (contentBuf)
        processContentBuf(true);
    return {
        content,
        toolCalls: Object.values(toolCalls),
        outputItems: [],
        usage,
        finalAnswerStreamed: answerParser.active,
    };
}
|
|
305
|
+
/**
 * Stream a Responses API request, forwarding events to `callbacks`.
 *
 * Handles output-text and reasoning-summary deltas, function-call item
 * lifecycle (added → argument deltas → done), and usage from the
 * completed/done event. When the first (and only) function call is
 * `final_answer`, its answer string is progressively decoded by
 * FinalAnswerParser and streamed via `onTextChunk`, after `onClearText?.()`
 * wipes any earlier streamed noise. Unknown event types are ignored.
 *
 * Returns `{ content, toolCalls, outputItems, usage, finalAnswerStreamed }`.
 */
async function streamResponsesApi(client, createParams, callbacks, signal) {
    (0, runtime_1.emitNervesEvent)({
        component: "engine",
        event: "engine.stream_start",
        message: "responses API stream start",
        meta: {},
    });
    // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Azure Responses API not in OpenAI SDK types
    const response = await client.responses.create(createParams, signal ? { signal } : {});
    let content = "";
    let streamStarted = false;
    const toolCalls = [];
    const outputItems = [];
    // The function-call item currently receiving argument deltas, if any.
    let currentToolCall = null;
    let usage;
    const answerParser = new FinalAnswerParser();
    let functionCallCount = 0;
    let finalAnswerDetected = false;
    for await (const event of response) {
        if (signal?.aborted)
            break;
        switch (event.type) {
            case "response.output_text.delta":
            case "response.reasoning_summary_text.delta": {
                if (!streamStarted) {
                    callbacks.onModelStreamStart();
                    streamStarted = true;
                }
                const delta = String(event.delta);
                if (event.type === "response.output_text.delta") {
                    callbacks.onTextChunk(delta);
                    content += delta;
                }
                else {
                    callbacks.onReasoningChunk(delta);
                }
                break;
            }
            case "response.output_item.added": {
                if (event.item?.type === "function_call") {
                    functionCallCount++;
                    currentToolCall = {
                        call_id: String(event.item.call_id),
                        name: String(event.item.name),
                        arguments: "",
                    };
                    // Detect final_answer function call -- clear any streamed noise.
                    // Only activate when this is the first (and so far only) function call.
                    // Mixed calls are rejected by core.ts; no need to stream their args.
                    if (String(event.item.name) === "final_answer" && functionCallCount === 1) {
                        finalAnswerDetected = true;
                        callbacks.onClearText?.();
                    }
                }
                break;
            }
            case "response.function_call_arguments.delta": {
                if (currentToolCall) {
                    currentToolCall.arguments += event.delta;
                    // Feed final_answer argument deltas to the parser for progressive
                    // streaming, but only when it appears to be the sole function call.
                    if (finalAnswerDetected && currentToolCall.name === "final_answer"
                        && functionCallCount === 1) {
                        const text = answerParser.process(String(event.delta));
                        if (text)
                            callbacks.onTextChunk(text);
                    }
                }
                break;
            }
            case "response.output_item.done": {
                // Every finished item is preserved verbatim (e.g. so reasoning
                // items can be replayed on the next turn by toResponsesInput).
                outputItems.push(event.item);
                if (event.item?.type === "function_call") {
                    toolCalls.push({
                        id: String(event.item.call_id),
                        name: String(event.item.name),
                        arguments: String(event.item.arguments),
                    });
                    currentToolCall = null;
                }
                break;
            }
            case "response.completed":
            case "response.done": {
                const u = event.response?.usage;
                if (u) {
                    usage = {
                        input_tokens: u.input_tokens,
                        output_tokens: u.output_tokens,
                        reasoning_tokens: u.output_tokens_details?.reasoning_tokens ?? 0,
                        total_tokens: u.total_tokens,
                    };
                }
                break;
            }
            default:
                // Unknown/unhandled events silently ignored
                break;
        }
    }
    return {
        content,
        toolCalls,
        outputItems,
        usage,
        finalAnswerStreamed: answerParser.active,
    };
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createTurnCoordinator = createTurnCoordinator;
|
|
4
|
+
const runtime_1 = require("../nerves/runtime");
|
|
5
|
+
/**
 * Create a per-key turn coordinator: serializes turns behind promise
 * chains, tracks which keys are mid-turn, and buffers follow-up payloads
 * for keys whose turn is busy.
 */
function createTurnCoordinator() {
    // Tail promise per key; each new turn chains behind the previous one.
    const locks = new Map();
    // Keys whose turn body is currently executing.
    const running = new Set();
    // Buffered follow-up payloads, drained after a turn completes.
    const followUps = new Map();
    return {
        async withTurnLock(key, fn) {
            (0, runtime_1.emitNervesEvent)({
                component: "engine",
                event: "engine.turn_start",
                message: "turn lock acquired",
                meta: { key },
            });
            const tail = locks.get(key) ?? Promise.resolve();
            const execution = tail.then(async () => {
                running.add(key);
                try {
                    return await fn();
                }
                finally {
                    running.delete(key);
                }
            });
            // Swallow rejections on the chained copy so one failed turn
            // cannot poison the lock chain for later turns.
            const quiet = execution.then(() => undefined, () => undefined);
            locks.set(key, quiet);
            try {
                return await execution;
            }
            finally {
                // Only clean up if no newer turn replaced our tail entry.
                if (locks.get(key) === quiet)
                    locks.delete(key);
            }
        },
        tryBeginTurn(key) {
            if (running.has(key))
                return false;
            running.add(key);
            return true;
        },
        endTurn(key) {
            running.delete(key);
        },
        isTurnActive(key) {
            return running.has(key);
        },
        enqueueFollowUp(key, followUp) {
            const queue = followUps.get(key) ?? [];
            queue.push(followUp);
            followUps.set(key, queue);
        },
        drainFollowUps(key) {
            const queue = followUps.get(key);
            if (!queue || queue.length === 0)
                return [];
            followUps.delete(key);
            return [...queue];
        },
    };
}
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// --- TypeScript-emitted CommonJS interop helpers (standard tsc boilerplate).
// These implement `import * as ns from "..."` semantics for CJS output;
// do not hand-edit — regenerated by the compiler on every build. ---
// Re-exports property `k` of module `m` onto `o` (as `k2`), preserving
// live-binding getters where property descriptors allow it.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the module object as the `default` export on a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CJS module in an ESM-style namespace object (all own keys
// re-bound, module itself under `default`); passes real ESM through as-is.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.cosineSimilarity = cosineSimilarity;
|
|
37
|
+
exports.recallFactsForQuery = recallFactsForQuery;
|
|
38
|
+
exports.injectAssociativeRecall = injectAssociativeRecall;
|
|
39
|
+
const fs = __importStar(require("fs"));
|
|
40
|
+
const path = __importStar(require("path"));
|
|
41
|
+
const config_1 = require("../heart/config");
|
|
42
|
+
const identity_1 = require("../heart/identity");
|
|
43
|
+
const runtime_1 = require("../nerves/runtime");
|
|
44
|
+
const DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small";
|
|
45
|
+
const DEFAULT_MIN_SCORE = 0.5;
|
|
46
|
+
const DEFAULT_TOP_K = 3;
|
|
47
|
+
// Embedding provider backed by the OpenAI embeddings REST endpoint.
class OpenAIEmbeddingProvider {
    apiKey;
    model;
    /**
     * @param {string} apiKey - OpenAI API key, sent as a Bearer token.
     * @param {string} [model] - Embedding model name.
     */
    constructor(apiKey, model = DEFAULT_EMBEDDING_MODEL) {
        this.apiKey = apiKey;
        this.model = model;
    }
    /**
     * Embed a batch of texts.
     * @param {string[]} texts
     * @returns {Promise<number[][]>} one vector per input, in input order.
     * @throws {Error} on a non-2xx response or a payload with a missing or
     *   mismatched `data` array.
     */
    async embed(texts) {
        const response = await fetch("https://api.openai.com/v1/embeddings", {
            method: "POST",
            headers: {
                Authorization: `Bearer ${this.apiKey}`,
                "Content-Type": "application/json",
            },
            body: JSON.stringify({
                model: this.model,
                input: texts,
            }),
        });
        if (!response.ok) {
            throw new Error(`embedding request failed: ${response.status} ${response.statusText}`);
        }
        const payload = (await response.json());
        if (!payload.data || payload.data.length !== texts.length) {
            throw new Error("embedding response missing expected vectors");
        }
        // The API tags each vector with the index of its input; sort by it
        // defensively instead of assuming the array preserves input order.
        return [...payload.data]
            .sort((a, b) => (a.index ?? 0) - (b.index ?? 0))
            .map((entry) => entry.embedding);
    }
}
|
|
76
|
+
/**
 * Build the default embedding provider from the configured OpenAI key.
 * @throws {Error} when no embeddings API key is configured.
 */
function createDefaultProvider() {
    const key = (0, config_1.getOpenAIEmbeddingsApiKey)();
    if (!key)
        throw new Error("openaiEmbeddingsApiKey not configured");
    return new OpenAIEmbeddingProvider(key);
}
|
|
83
|
+
/**
 * Load persisted facts from `<memoryRoot>/facts.jsonl` (one JSON object
 * per line). Returns [] when the file is missing or empty.
 *
 * Blank interior lines are skipped — previously a stray empty line made
 * JSON.parse("") throw. Per-line trimming also tolerates CRLF endings.
 * A line with malformed JSON still throws (callers catch and log).
 */
function readFacts(memoryRoot) {
    const factsPath = path.join(memoryRoot, "facts.jsonl");
    if (!fs.existsSync(factsPath))
        return [];
    const raw = fs.readFileSync(factsPath, "utf8").trim();
    if (!raw)
        return [];
    return raw
        .split("\n")
        .map((line) => line.trim())
        .filter((line) => line.length > 0)
        .map((line) => JSON.parse(line));
}
|
|
92
|
+
/**
 * Return the most recent user message's trimmed text, or "" when no user
 * message with non-empty string content exists. Non-string content
 * (e.g. multi-part arrays) is skipped.
 */
function getLatestUserText(messages) {
    // Walk backwards so the newest qualifying message wins.
    for (let idx = messages.length - 1; idx >= 0; idx--) {
        const candidate = messages[idx];
        if (candidate.role !== "user" || typeof candidate.content !== "string")
            continue;
        const trimmed = candidate.content.trim();
        if (trimmed)
            return trimmed;
    }
    return "";
}
|
|
105
|
+
/**
 * Cosine similarity of two equal-length numeric vectors.
 * Returns 0 for empty, length-mismatched, or zero-magnitude inputs
 * (so it never divides by zero).
 */
function cosineSimilarity(left, right) {
    const size = left.length;
    if (size === 0 || right.length === 0 || right.length !== size)
        return 0;
    let dot = 0;
    let leftNorm = 0;
    let rightNorm = 0;
    for (let idx = 0; idx < size; idx++) {
        const a = left[idx];
        const b = right[idx];
        dot += a * b;
        leftNorm += a * a;
        rightNorm += b * b;
    }
    if (leftNorm === 0 || rightNorm === 0)
        return 0;
    return dot / (Math.sqrt(leftNorm) * Math.sqrt(rightNorm));
}
|
|
120
|
+
/**
 * Score facts against a query by embedding similarity and return the
 * top matches (each fact augmented with a `score` field), highest first.
 * Returns [] for a blank query. Defaults: minScore 0.5, topK 3.
 */
async function recallFactsForQuery(query, facts, provider, options) {
    const normalized = query.trim();
    if (normalized === "")
        return [];
    const minScore = options?.minScore ?? DEFAULT_MIN_SCORE;
    const topK = options?.topK ?? DEFAULT_TOP_K;
    const [queryVector] = await provider.embed([normalized]);
    const scored = facts.map((fact) => ({
        ...fact,
        score: cosineSimilarity(queryVector, fact.embedding),
    }));
    const matching = scored.filter((entry) => entry.score >= minScore);
    matching.sort((a, b) => b.score - a.score);
    return matching.slice(0, topK);
}
|
|
136
|
+
/**
 * Augment the leading system prompt with facts recalled from the agent's
 * memory store, based on the latest user message.
 *
 * Mutates `messages` in place: replaces `messages[0]` with a new system
 * message whose content has a "## recalled context" section appended.
 * No-ops (returns without changes) when there is no leading string
 * system message, no user text, no stored facts, or no recall hits.
 *
 * Best-effort by design: any failure (fs read, embedding request, parse)
 * is swallowed and reported as a warn-level nerves event so recall can
 * never break the turn.
 *
 * @param messages - chat messages; index 0 is expected to be the system prompt.
 * @param options - optional overrides: `memoryRoot`, `provider`, and the
 *   recall thresholds forwarded to recallFactsForQuery.
 */
async function injectAssociativeRecall(messages, options) {
    try {
        if (messages[0]?.role !== "system" || typeof messages[0].content !== "string")
            return;
        const query = getLatestUserText(messages);
        if (!query)
            return;
        // Default store location: <agentRoot>/psyche/memory/facts.jsonl.
        const memoryRoot = options?.memoryRoot ?? path.join((0, identity_1.getAgentRoot)(), "psyche", "memory");
        const facts = readFacts(memoryRoot);
        if (facts.length === 0)
            return;
        // Provider is built lazily so the API key is only required when
        // there are actually facts to score.
        const provider = options?.provider ?? createDefaultProvider();
        const recalled = await recallFactsForQuery(query, facts, provider, options);
        if (recalled.length === 0)
            return;
        const recallSection = recalled
            .map((fact, index) => `${index + 1}. ${fact.text} [score=${fact.score.toFixed(3)} source=${fact.source}]`)
            .join("\n");
        messages[0] = {
            role: "system",
            content: `${messages[0].content}\n\n## recalled context\n${recallSection}`,
        };
        (0, runtime_1.emitNervesEvent)({
            component: "mind",
            event: "mind.associative_recall",
            message: "associative recall injected",
            meta: { count: recalled.length },
        });
    }
    catch (error) {
        (0, runtime_1.emitNervesEvent)({
            level: "warn",
            component: "mind",
            event: "mind.associative_recall_error",
            message: "associative recall failed",
            meta: {
                reason: error instanceof Error ? error.message : String(error),
            },
        });
    }
}
|