0agent 1.0.73 → 1.0.74
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/daemon.mjs +21 -7
- package/package.json +1 -1
package/dist/daemon.mjs
CHANGED
|
@@ -6339,6 +6339,23 @@ var SessionManager = class {
|
|
|
6339
6339
|
const activeLLM = this.getFreshLLM();
|
|
6340
6340
|
if (activeLLM?.isConfigured) {
|
|
6341
6341
|
const userEntityId = enrichedReq.entity_id ?? this.identity?.entity_node_id;
|
|
6342
|
+
const isConversational = /^(hey|hi|hello|sup|yo|what'?s up|how are you|thanks|ok|cool|bye|good\s+(morning|evening|afternoon)|lol|nice)[!?.\s,]*$/i.test(enrichedReq.task.trim());
|
|
6343
|
+
if (isConversational) {
|
|
6344
|
+
const resp = await activeLLM.complete(
|
|
6345
|
+
[{ role: "user", content: enrichedReq.task }],
|
|
6346
|
+
"You are a helpful assistant."
|
|
6347
|
+
);
|
|
6348
|
+
this.emit({ type: "session.token", session_id: sessionId, token: resp.content });
|
|
6349
|
+
this.addStep(sessionId, `Done (${resp.tokens_used} tokens, 1 LLM turns)`);
|
|
6350
|
+
this.completeSession(sessionId, {
|
|
6351
|
+
output: resp.content,
|
|
6352
|
+
files_written: [],
|
|
6353
|
+
commands_run: [],
|
|
6354
|
+
tokens_used: resp.tokens_used,
|
|
6355
|
+
model: resp.model
|
|
6356
|
+
});
|
|
6357
|
+
return this.sessions.get(sessionId);
|
|
6358
|
+
}
|
|
6342
6359
|
const executor = new AgentExecutor(
|
|
6343
6360
|
activeLLM,
|
|
6344
6361
|
{ cwd: this.cwd, agent_root: this.agentRoot, graph: this.graph, onMemoryWrite: this.onMemoryWritten, entityNodeId: userEntityId },
|
|
@@ -6436,8 +6453,7 @@ Current task:`;
|
|
|
6436
6453
|
this.addStep(sessionId, `Commands run: ${agentResult.commands_run.length}`);
|
|
6437
6454
|
}
|
|
6438
6455
|
this.addStep(sessionId, `Done (${agentResult.tokens_used} tokens, ${agentResult.iterations} LLM turns)`);
|
|
6439
|
-
|
|
6440
|
-
if (!isConversational && this.graph) {
|
|
6456
|
+
if (this.graph) {
|
|
6441
6457
|
try {
|
|
6442
6458
|
const nodeId = `memory:session_${sessionId.slice(0, 8)}`;
|
|
6443
6459
|
const label = enrichedReq.task.slice(0, 80);
|
|
@@ -6469,11 +6485,9 @@ Current task:`;
|
|
|
6469
6485
|
console.warn("[0agent] Graph: baseline write failed:", err instanceof Error ? err.message : err);
|
|
6470
6486
|
}
|
|
6471
6487
|
}
|
|
6472
|
-
|
|
6473
|
-
|
|
6474
|
-
|
|
6475
|
-
});
|
|
6476
|
-
}
|
|
6488
|
+
this._extractAndPersistFacts(enrichedReq.task, agentResult.output, activeLLM, userEntityId).catch((err) => {
|
|
6489
|
+
console.warn("[0agent] Memory extraction outer error:", err instanceof Error ? err.message : err);
|
|
6490
|
+
});
|
|
6477
6491
|
this.completeSession(sessionId, {
|
|
6478
6492
|
output: agentResult.output,
|
|
6479
6493
|
files_written: agentResult.files_written,
|