@oh-my-pi/pi-agent-core 8.1.0 → 8.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +9 -10
- package/src/agent-loop.ts +7 -8
- package/src/agent.ts +5 -6
- package/src/proxy.ts +2 -3
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@oh-my-pi/pi-agent-core",
-  "version": "8.1.0",
+  "version": "8.2.1",
   "description": "General-purpose agent with transport abstraction, state management, and attachment support",
   "type": "module",
   "main": "./src/index.ts",
@@ -11,8 +11,8 @@
       "import": "./src/index.ts"
     },
     "./*": {
-      "types": "./src
-      "import": "./src
+      "types": "./src/*.ts",
+      "import": "./src/*.ts"
     }
   },
   "files": [
@@ -20,13 +20,13 @@
     "README.md"
   ],
   "scripts": {
-    "
-    "
+    "check": "tsgo -p tsconfig.json",
+    "test": "bun test"
   },
   "dependencies": {
-    "@oh-my-pi/pi-ai": "
-    "@oh-my-pi/pi-tui": "
-    "@oh-my-pi/pi-utils": "
+    "@oh-my-pi/pi-ai": "8.2.1",
+    "@oh-my-pi/pi-tui": "8.2.1",
+    "@oh-my-pi/pi-utils": "8.2.1"
   },
   "keywords": [
     "ai",
@@ -47,7 +47,6 @@
   },
   "devDependencies": {
     "@sinclair/typebox": "^0.34.46",
-    "@types/node": "^
-    "vitest": "^3.2.4"
+    "@types/node": "^25.0.10"
   }
 }
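Both "./*" subpaths now resolve directly to the TypeScript sources under ./src. As a minimal sketch of what that enables for a consumer, the snippet below deep-imports one module; the "agent" subpath and the constructor usage are assumptions based on the files shown later in this diff, not documented API:

    // Hypothetical consumer file. The "./*" exports entry resolves the "agent"
    // subpath to ./src/agent.ts, whose Agent class and AgentOptions interface
    // appear later in this diff; the constructor shape is assumed, not shown.
    import { Agent, type AgentOptions } from "@oh-my-pi/pi-agent-core/agent";

    declare const options: AgentOptions;
    const agent = new Agent(options);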
package/src/agent-loop.ts
CHANGED
@@ -2,7 +2,6 @@
  * Agent loop that works with AgentMessage throughout.
  * Transforms to Message[] only at the LLM call boundary.
  */
-
 import {
   type AssistantMessage,
   type Context,
@@ -107,12 +106,12 @@ function normalizeMessagesForProvider(
   }
 
   let changed = false;
-  const normalized = messages.map(
+  const normalized = messages.map(message => {
     if (message.role !== "assistant" || !Array.isArray(message.content)) {
       return message;
     }
 
-    const filtered = message.content.filter(
+    const filtered = message.content.filter(block => block.type !== "thinking");
     if (filtered.length === message.content.length) {
       return message;
     }
@@ -186,7 +185,7 @@ async function runLoop(
     }
 
     // Check for tool calls
-    const toolCalls = message.content.filter(
+    const toolCalls = message.content.filter(c => c.type === "toolCall");
     hasMoreToolCalls = toolCalls.length > 0;
 
     const toolResults: ToolResultMessage[] = [];
@@ -373,16 +372,16 @@ async function executeToolCalls(
   getToolContext?: AgentLoopConfig["getToolContext"],
   interruptMode: AgentLoopConfig["interruptMode"] = "immediate",
 ): Promise<{ toolResults: ToolResultMessage[]; steeringMessages?: AgentMessage[] }> {
-  const toolCalls = assistantMessage.content.filter(
+  const toolCalls = assistantMessage.content.filter(c => c.type === "toolCall");
   const results: ToolResultMessage[] = [];
   let steeringMessages: AgentMessage[] | undefined;
   const shouldInterruptImmediately = interruptMode !== "wait";
-  const toolCallInfos = toolCalls.map(
+  const toolCallInfos = toolCalls.map(call => ({ id: call.id, name: call.name }));
   const batchId = `${assistantMessage.timestamp ?? Date.now()}_${toolCalls[0]?.id ?? "batch"}`;
 
   for (let index = 0; index < toolCalls.length; index++) {
     const toolCall = toolCalls[index];
-    const tool = tools?.find(
+    const tool = tools?.find(t => t.name === toolCall.name);
 
     stream.push({
       type: "tool_execution_start",
@@ -411,7 +410,7 @@ async function executeToolCalls(
       toolCall.id,
       validatedArgs,
       tool.nonAbortable ? undefined : signal,
-
+      partialResult => {
         stream.push({
           type: "tool_execution_update",
           toolCallId: toolCall.id,
package/src/agent.ts
CHANGED
@@ -2,7 +2,6 @@
  * Agent class that uses the agent-loop directly.
  * No transport abstraction - calls streamSimple via the loop.
  */
-
 import {
   type AssistantMessage,
   type CursorExecHandlers,
@@ -34,7 +33,7 @@ import type {
  * Default convertToLlm: Keep only LLM-compatible messages, convert attachments.
  */
 function defaultConvertToLlm(messages: AgentMessage[]): Message[] {
-  return messages.filter(
+  return messages.filter(m => m.role === "user" || m.role === "assistant" || m.role === "toolResult");
 }
 
 export interface AgentOptions {
@@ -420,7 +419,7 @@ export class Agent {
     const model = this._state.model;
     if (!model) throw new Error("No model configured");
 
-    this.runningPrompt = new Promise<void>(
+    this.runningPrompt = new Promise<void>(resolve => {
       this.resolveRunningPrompt = resolve;
     });
 
@@ -569,7 +568,7 @@ export class Agent {
     // Handle any remaining partial message
     if (partial && partial.role === "assistant" && partial.content.length > 0) {
       const onlyEmpty = !partial.content.some(
-
+        c =>
           (c.type === "thinking" && c.thinking.trim().length > 0) ||
           (c.type === "text" && c.text.trim().length > 0) ||
           (c.type === "toolCall" && c.name.trim().length > 0),
@@ -655,7 +654,7 @@ export class Agent {
     }
 
     // Find the split point: minimum text length at first tool call
-    const splitPoint = Math.min(...buffer.map(
+    const splitPoint = Math.min(...buffer.map(r => r.textLengthAtCall));
 
     // Extract text content from assistant message
     const content = assistantMessage.content;
@@ -687,7 +686,7 @@ export class Agent {
     const continuationText = fullText.slice(splitPoint);
 
     // Create preamble message (text before tools)
-    const preambleContent = content.map(
+    const preambleContent = content.map(block => {
       if (block.type === "text") {
         return { ...block, text: preambleText };
       }
package/src/proxy.ts
CHANGED
@@ -2,7 +2,6 @@
  * Proxy stream function for apps that route LLM calls through a server.
  * The server manages auth and proxies requests to LLM providers.
  */
-
 import {
   type AssistantMessage,
   type AssistantMessageEvent,
@@ -20,8 +19,8 @@ import { readSseEvents } from "@oh-my-pi/pi-utils";
 class ProxyMessageEventStream extends EventStream<AssistantMessageEvent, AssistantMessage> {
   constructor() {
     super(
-
-
+      event => event.type === "done" || event.type === "error",
+      event => {
         if (event.type === "done") return event.message;
         if (event.type === "error") return event.error;
         throw new Error("Unexpected event type");