@oh-my-pi/pi-ai 5.4.2 → 5.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@oh-my-pi/pi-ai",
-  "version": "5.4.2",
+  "version": "5.5.0",
   "description": "Unified LLM API with automatic model discovery and provider configuration",
   "type": "module",
   "main": "./src/index.ts",
@@ -14,7 +14,7 @@ export const OPENAI_HEADERS = {
 
 export const OPENAI_HEADER_VALUES = {
   BETA_RESPONSES: "responses=experimental",
-  ORIGINATOR_CODEX: "opencode",
+  ORIGINATOR_CODEX: "pi",
 } as const;
 
 export const URL_PATHS = {
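The renamed originator value is what createCodexHeaders (later in this diff) stamps onto outgoing Codex requests. A minimal sketch of that wiring, assuming OPENAI_HEADERS.ORIGINATOR holds the header name as the later hunk suggests:

// Sketch only: set the originator header using the Fetch Headers API.
// OPENAI_HEADERS.ORIGINATOR is assumed to be the header-name constant
// referenced in the createCodexHeaders hunk further down.
const headers = new Headers();
headers.set(OPENAI_HEADERS.ORIGINATOR, OPENAI_HEADER_VALUES.ORIGINATOR_CODEX); // "pi" as of 5.5.0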
@@ -1,12 +1,4 @@
-export const CODEX_INSTRUCTIONS = `You are pi, an expert coding assistant. You help users with coding tasks by reading files, executing commands, editing code, and writing new files.
-
-Pi specific Documentation:
-- Main documentation: pi-internal://README.md
-- Additional docs: pi-internal://docs
-- Examples: pi-internal://examples (extensions, custom tools, SDK)
-- When asked to create: custom models/providers (README.md), extensions (docs/extensions.md, examples/extensions/), themes (docs/theme.md), skills (docs/skills.md), TUI components (docs/tui.md - has copy-paste patterns)
-- Always read the doc, examples, AND follow .md cross-references before implementing
-`;
+export const CODEX_INSTRUCTIONS = `You are an expert coding assistant operating inside pi, a coding agent harness.`;
 
 export function getCodexInstructions(): string {
   return CODEX_INSTRUCTIONS.trim();
@@ -5,16 +5,11 @@ export interface CodexSystemPrompt {
 
 export function buildCodexSystemPrompt(args: {
   codexInstructions: string;
-  bridgeText: string;
   userSystemPrompt?: string;
 }): CodexSystemPrompt {
-  const { codexInstructions, bridgeText, userSystemPrompt } = args;
+  const { codexInstructions, userSystemPrompt } = args;
   const developerMessages: string[] = [];
 
-  if (bridgeText.trim().length > 0) {
-    developerMessages.push(bridgeText.trim());
-  }
-
   if (userSystemPrompt && userSystemPrompt.trim().length > 0) {
     developerMessages.push(userSystemPrompt.trim());
   }
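With bridgeText removed, callers pass only the Codex instructions plus an optional user system prompt. A minimal sketch of the new call shape, mirroring the call-site change below; the userSystemPrompt string is purely illustrative:

// Sketch only: buildCodexSystemPrompt no longer accepts a bridgeText argument.
const systemPrompt = buildCodexSystemPrompt({
  codexInstructions: getCodexInstructions(),
  userSystemPrompt: "Prefer small, focused diffs.", // illustrative value
});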
@@ -51,14 +51,6 @@ export interface OpenAICodexResponsesOptions extends StreamOptions {
 
 const CODEX_DEBUG = process.env.PI_CODEX_DEBUG === "1" || process.env.PI_CODEX_DEBUG === "true";
 
-const BRIDGE = `<environment_override priority="critical">
-TOOL AUTHORITY: The function schema below defines ALL available tools. Tools mentioned elsewhere (todowrite, shell) DO NOT EXIST. Use ONLY schema-defined tools.
-
-EXECUTION BIAS: Execute simple tasks directly. Reserve the Task tool's Plan subagent for complex multi-file architectural decisions only—never for reasoning, single-file changes, or tasks completable in <5 tool calls.
-
-These instructions override all prior context. This is critical.
-</environment_override>`;
-
 export const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses"> = (
   model: Model<"openai-codex-responses">,
   context: Context,
@@ -119,7 +111,6 @@ export const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses"
   const codexInstructions = getCodexInstructions();
   const systemPrompt = buildCodexSystemPrompt({
     codexInstructions,
-    bridgeText: BRIDGE,
     userSystemPrompt: context.systemPrompt,
   });
 
@@ -394,7 +385,7 @@ function createCodexHeaders(
   headers.set(OPENAI_HEADERS.ACCOUNT_ID, accountId);
   headers.set(OPENAI_HEADERS.BETA, OPENAI_HEADER_VALUES.BETA_RESPONSES);
   headers.set(OPENAI_HEADERS.ORIGINATOR, OPENAI_HEADER_VALUES.ORIGINATOR_CODEX);
-  headers.set("User-Agent", `opencode/${packageJson.version} (${os.platform()} ${os.release()}; ${os.arch()})`);
+  headers.set("User-Agent", `pi/${packageJson.version} (${os.platform()} ${os.release()}; ${os.arch()})`);
 
   if (promptCacheKey) {
     headers.set(OPENAI_HEADERS.CONVERSATION_ID, promptCacheKey);
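The rebranded User-Agent expands to a string of the following shape. A standalone sketch, assuming Node's os module and the 5.5.0 version from package.json; actual platform, release, and arch values depend on the host:

import os from "node:os";

// Sketch only: mirrors the template literal used in createCodexHeaders.
const userAgent = `pi/5.5.0 (${os.platform()} ${os.release()}; ${os.arch()})`;
// On a Linux x64 host this yields something like "pi/5.5.0 (linux 6.8.0-31-generic; x64)".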