@oh-my-pi/pi-agent-core 3.20.0 → 3.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/package.json +3 -3
- package/src/agent-loop.ts +1 -1
- package/src/agent.ts +26 -8
- package/src/proxy.ts +2 -2
- package/src/types.ts +7 -1
package/README.md
CHANGED
@@ -1,6 +1,6 @@
 # @oh-my-pi/pi-agent
 
-Stateful agent with tool execution and event streaming. Built on `@
+Stateful agent with tool execution and event streaming. Built on `@mariozechner/pi-ai`.
 
 ## Installation
 
@@ -12,7 +12,7 @@ npm install @oh-my-pi/pi-agent
 
 ```typescript
 import { Agent } from "@oh-my-pi/pi-agent";
-import { getModel } from "@
+import { getModel } from "@mariozechner/pi-ai";
 
 const agent = new Agent({
   initialState: {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@oh-my-pi/pi-agent-core",
-  "version": "3.20.0",
+  "version": "3.21.0",
   "description": "General-purpose agent with transport abstraction, state management, and attachment support",
   "type": "module",
   "main": "./src/index.ts",
@@ -13,8 +13,8 @@
     "test": "vitest --run"
   },
   "dependencies": {
-    "@
-    "@oh-my-pi/pi-tui": "3.
+    "@mariozechner/pi-ai": "^0.37.4",
+    "@oh-my-pi/pi-tui": "3.21.0"
   },
   "keywords": [
     "ai",
package/src/agent-loop.ts
CHANGED
package/src/agent.ts
CHANGED
@@ -8,10 +8,9 @@ import {
   type ImageContent,
   type Message,
   type Model,
-  type ReasoningEffort,
   streamSimple,
   type TextContent,
-} from "@
+} from "@mariozechner/pi-ai";
 import { agentLoop, agentLoopContinue } from "./agent-loop";
 import type {
   AgentContext,
@@ -69,6 +68,12 @@ export interface AgentOptions {
    */
   streamFn?: StreamFn;
 
+  /**
+   * Optional session identifier forwarded to LLM providers.
+   * Used by providers that support session-based caching (e.g., OpenAI Codex).
+   */
+  sessionId?: string;
+
   /**
    * Resolves an API key dynamically for each LLM call.
    * Useful for expiring tokens (e.g., GitHub Copilot OAuth).
@@ -105,6 +110,7 @@ export class Agent {
   private followUpMode: "all" | "one-at-a-time";
   private interruptMode: "immediate" | "wait";
   public streamFn: StreamFn;
+  private _sessionId?: string;
   public getApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;
   private getToolContext?: () => AgentToolContext | undefined;
   private runningPrompt?: Promise<void>;
@@ -118,10 +124,26 @@
     this.followUpMode = opts.followUpMode || "one-at-a-time";
     this.interruptMode = opts.interruptMode || "immediate";
     this.streamFn = opts.streamFn || streamSimple;
+    this._sessionId = opts.sessionId;
     this.getApiKey = opts.getApiKey;
     this.getToolContext = opts.getToolContext;
   }
 
+  /**
+   * Get the current session ID used for provider caching.
+   */
+  get sessionId(): string | undefined {
+    return this._sessionId;
+  }
+
+  /**
+   * Set the session ID for provider caching.
+   * Call this when switching sessions (new session, branch, resume).
+   */
+  set sessionId(value: string | undefined) {
+    this._sessionId = value;
+  }
+
   get state(): AgentState {
     return this._state;
   }
@@ -314,12 +336,7 @@
     this._state.streamMessage = null;
     this._state.error = undefined;
 
-    const reasoning =
-      this._state.thinkingLevel === "off"
-        ? undefined
-        : this._state.thinkingLevel === "minimal"
-          ? "low"
-          : (this._state.thinkingLevel as ReasoningEffort);
+    const reasoning = this._state.thinkingLevel === "off" ? undefined : this._state.thinkingLevel;
 
     const context: AgentContext = {
       systemPrompt: this._state.systemPrompt,
@@ -331,6 +348,7 @@
       model,
       reasoning,
       interruptMode: this.interruptMode,
+      sessionId: this._sessionId,
       convertToLlm: this.convertToLlm,
       transformContext: this.transformContext,
       getApiKey: this.getApiKey,
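Taken together, the agent.ts changes let a `sessionId` be supplied in `AgentOptions`, read and swapped through the new accessor pair, and forwarded on each run via `AgentContext`. The following consumer-side sketch is illustrative only: the `getModel` import and `initialState` shape follow the README example above, while the provider/model id, system prompt, and session id values are placeholders, not taken from this package.

```typescript
import { Agent } from "@oh-my-pi/pi-agent";
import { getModel } from "@mariozechner/pi-ai";

// Construct the agent with a session identifier so providers that support
// session-based caching (e.g., OpenAI Codex) can reuse cached context.
const agent = new Agent({
  initialState: {
    // Placeholder provider/model id -- substitute whatever getModel supports.
    model: getModel("anthropic", "claude-sonnet-4"),
    systemPrompt: "You are a helpful assistant.", // placeholder prompt
  },
  sessionId: crypto.randomUUID(),
});

// Later, when resuming or branching into a different session, swap the id so
// subsequent LLM calls are forwarded with the new session identifier.
agent.sessionId = "resumed-session-id"; // placeholder value
```

The same field is added to `AgentLoopConfig` in types.ts below, which is presumably how the value travels from `AgentContext` into the actual stream call.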
package/src/proxy.ts
CHANGED
@@ -12,8 +12,8 @@ import {
   type SimpleStreamOptions,
   type StopReason,
   type ToolCall,
-} from "@
-import { parseStreamingJson } from "@
+} from "@mariozechner/pi-ai";
+import { parseStreamingJson } from "@mariozechner/pi-ai/dist/utils/json-parse.js";
 
 // Create stream class matching ProxyMessageEventStream
 class ProxyMessageEventStream extends EventStream<AssistantMessageEvent, AssistantMessage> {
package/src/types.ts
CHANGED
@@ -8,7 +8,7 @@ import type {
   TextContent,
   Tool,
   ToolResultMessage,
-} from "@
+} from "@mariozechner/pi-ai";
 import type { Static, TSchema } from "@sinclair/typebox";
 
 /** Stream function - can return sync or Promise for async config lookup */
@@ -29,6 +29,12 @@ export interface AgentLoopConfig extends SimpleStreamOptions {
    */
   interruptMode?: "immediate" | "wait";
 
+  /**
+   * Optional session identifier forwarded to LLM providers.
+   * Used by providers that support session-based caching (e.g., OpenAI Codex).
+   */
+  sessionId?: string;
+
   /**
    * Converts AgentMessage[] to LLM-compatible Message[] before each LLM call.
    *