@benvargas/pi-openai-fast 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +55 -0
- package/extensions/index.ts +199 -0
- package/package.json +42 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Ben Vargas
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# @benvargas/pi-openai-fast
|
|
2
|
+
|
|
3
|
+
Session-scoped `/fast` toggle for pi that enables OpenAI priority service tier on supported GPT-5.4 models.
|
|
4
|
+
|
|
5
|
+
This extension does not change the model, thinking level, tools, or prompts. It only adds `service_tier=priority` to provider requests when fast mode is active and the current model supports it.
|
|
6
|
+
|
|
7
|
+
Requires pi `0.57.0` or newer.
|
|
8
|
+
|
|
9
|
+
## Install
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
pi install npm:@benvargas/pi-openai-fast
|
|
13
|
+
```
|
|
14
|
+
|
|
15
|
+
Or try without installing:
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
pi -e npm:@benvargas/pi-openai-fast
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
## Usage
|
|
22
|
+
|
|
23
|
+
- `/fast` toggles fast mode on or off.
|
|
24
|
+
- `/fast on` explicitly enables fast mode.
|
|
25
|
+
- `/fast off` explicitly disables fast mode.
|
|
26
|
+
- `/fast status` reports the current fast-mode state.
|
|
27
|
+
- `--fast` starts the session with fast mode enabled.
|
|
28
|
+
|
|
29
|
+
Example:
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
pi -e npm:@benvargas/pi-openai-fast --fast
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Supported Models
|
|
36
|
+
|
|
37
|
+
- `openai/gpt-5.4`
|
|
38
|
+
- `openai-codex/gpt-5.4`
|
|
39
|
+
|
|
40
|
+
If fast mode is enabled on an unsupported model, the setting stays on but requests are left unchanged until you switch back to a supported model.
|
|
41
|
+
|
|
42
|
+
## Notes
|
|
43
|
+
|
|
44
|
+
- Fast mode is stored as session state, so it persists with the session branch.
|
|
45
|
+
- On supported models, fast mode maps to OpenAI `service_tier=priority`.
|
|
46
|
+
|
|
47
|
+
## Uninstall
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
pi remove npm:@benvargas/pi-openai-fast
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
## License
|
|
54
|
+
|
|
55
|
+
MIT
|
package/extensions/index.ts
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
// Type-only imports from the pi coding agent host API.
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";

// Slash-command name: /fast
const FAST_COMMAND = "fast";
// CLI flag name: --fast
const FAST_FLAG = "fast";
// customType tag used to persist fast-mode state entries in the session branch.
const FAST_STATE_ENTRY = "pi-openai-fast.state";
// Accepted sub-arguments for the /fast command.
const FAST_COMMAND_ARGS = ["on", "off", "status"] as const;
// OpenAI service tier applied to provider requests while fast mode is active.
const FAST_SERVICE_TIER = "priority";
// Models whose provider requests may carry the priority service tier.
const FAST_SUPPORTED_MODELS = [
  { provider: "openai", id: "gpt-5.4" },
  { provider: "openai-codex", id: "gpt-5.4" },
] as const;
|
|
12
|
+
|
|
13
|
+
// Session-scoped fast-mode state, persisted as a custom session entry.
interface FastModeState {
  // True when fast mode is enabled on the current session branch.
  active: boolean;
}

// Shape of a provider request payload this extension may augment.
// Only service_tier is written; every other key passes through unchanged.
type FastPayload = {
  service_tier?: string;
  [key: string]: unknown;
};
|
|
21
|
+
|
|
22
|
+
function isRecord(value: unknown): value is Record<string, unknown> {
|
|
23
|
+
return typeof value === "object" && value !== null && !Array.isArray(value);
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
function parseFastModeState(value: unknown): FastModeState | undefined {
|
|
27
|
+
if (!isRecord(value) || typeof value.active !== "boolean") {
|
|
28
|
+
return undefined;
|
|
29
|
+
}
|
|
30
|
+
return { active: value.active };
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
function getSavedFastModeState(ctx: ExtensionContext): FastModeState | undefined {
|
|
34
|
+
const entries = ctx.sessionManager.getBranch();
|
|
35
|
+
for (let i = entries.length - 1; i >= 0; i--) {
|
|
36
|
+
const entry = entries[i];
|
|
37
|
+
if (entry.type === "custom" && entry.customType === FAST_STATE_ENTRY) {
|
|
38
|
+
return parseFastModeState(entry.data);
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
return undefined;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
function getCurrentModelKey(model: ExtensionContext["model"]): string | undefined {
|
|
45
|
+
if (!model) {
|
|
46
|
+
return undefined;
|
|
47
|
+
}
|
|
48
|
+
return `${model.provider}/${model.id}`;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
function isFastSupportedModel(model: ExtensionContext["model"]): boolean {
|
|
52
|
+
if (!model) {
|
|
53
|
+
return false;
|
|
54
|
+
}
|
|
55
|
+
return FAST_SUPPORTED_MODELS.some((supported) => supported.provider === model.provider && supported.id === model.id);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
function describeSupportedModels(): string {
|
|
59
|
+
return FAST_SUPPORTED_MODELS.map((supported) => `${supported.provider}/${supported.id}`).join(", ");
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
function describeCurrentState(ctx: ExtensionContext, active: boolean): string {
|
|
63
|
+
const model = getCurrentModelKey(ctx.model) ?? "none";
|
|
64
|
+
if (!active) {
|
|
65
|
+
return `Fast mode is off. Current model: ${model}.`;
|
|
66
|
+
}
|
|
67
|
+
if (!ctx.model) {
|
|
68
|
+
return `Fast mode is on. No model is selected. Supported models: ${describeSupportedModels()}.`;
|
|
69
|
+
}
|
|
70
|
+
if (isFastSupportedModel(ctx.model)) {
|
|
71
|
+
return `Fast mode is on for ${model}.`;
|
|
72
|
+
}
|
|
73
|
+
return `Fast mode is on, but ${model} does not support it. Supported models: ${describeSupportedModels()}.`;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
function applyFastServiceTier(payload: unknown): unknown {
|
|
77
|
+
if (!isRecord(payload)) {
|
|
78
|
+
return payload;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
const nextPayload: FastPayload = { ...payload };
|
|
82
|
+
nextPayload.service_tier = FAST_SERVICE_TIER;
|
|
83
|
+
return nextPayload;
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
/**
 * Extension entry point. Registers the `--fast` startup flag, the `/fast`
 * command, and the provider-request hook that injects
 * `service_tier=priority` on supported models while fast mode is active.
 */
export default function piOpenAIFast(pi: ExtensionAPI): void {
  // In-memory fast-mode state; reloaded from the session branch on session_start.
  let state: FastModeState = { active: false };

  // Append the current state to the session so it survives reloads and branching.
  function persistState(): void {
    pi.appendEntry(FAST_STATE_ENTRY, state);
  }

  // Turn fast mode on (idempotent). options.notify === false suppresses UI output.
  async function enableFastMode(ctx: ExtensionContext, options?: { notify?: boolean }): Promise<void> {
    if (state.active) {
      if (options?.notify !== false) {
        ctx.ui.notify("Fast mode is already on.", "info");
      }
      return;
    }

    state = { active: true };
    persistState();

    if (options?.notify !== false) {
      // Full status message: also warns when the current model is unsupported.
      ctx.ui.notify(describeCurrentState(ctx, state.active), "info");
    }
  }

  // Turn fast mode off (idempotent). options.notify === false suppresses UI output.
  async function disableFastMode(ctx: ExtensionContext, options?: { notify?: boolean }): Promise<void> {
    if (!state.active) {
      if (options?.notify !== false) {
        ctx.ui.notify("Fast mode is already off.", "info");
      }
      return;
    }

    state = { active: false };
    persistState();

    if (options?.notify !== false) {
      ctx.ui.notify("Fast mode disabled.", "info");
    }
  }

  // Flip fast mode to the opposite state.
  async function toggleFastMode(ctx: ExtensionContext): Promise<void> {
    if (state.active) {
      await disableFastMode(ctx);
      return;
    }
    await enableFastMode(ctx);
  }

  // --fast: start the session with fast mode enabled.
  pi.registerFlag(FAST_FLAG, {
    description: "Start with OpenAI fast mode enabled",
    type: "boolean",
    default: false,
  });

  // /fast [on|off|status]: toggle or inspect fast mode.
  pi.registerCommand(FAST_COMMAND, {
    description: "Toggle fast mode (priority service tier for supported OpenAI GPT-5.4 models)",
    // Complete "on" / "off" / "status" as the user types; null hides the menu.
    getArgumentCompletions: (prefix) => {
      const items = FAST_COMMAND_ARGS.filter((value) => value.startsWith(prefix)).map((value) => ({
        value,
        label: value,
      }));
      return items.length > 0 ? items : null;
    },
    handler: async (args, ctx) => {
      // Arguments are case-insensitive; surrounding whitespace is ignored.
      const command = args.trim().toLowerCase();

      // Bare /fast toggles the current state.
      if (command.length === 0) {
        await toggleFastMode(ctx);
        return;
      }

      switch (command) {
        case "on":
          await enableFastMode(ctx);
          return;
        case "off":
          await disableFastMode(ctx);
          return;
        case "status":
          ctx.ui.notify(describeCurrentState(ctx, state.active), "info");
          return;
        default:
          ctx.ui.notify("Usage: /fast [on|off|status]", "error");
      }
    },
  });

  // Inject the priority service tier only when fast mode is on AND the
  // current model supports it; returning undefined leaves the request
  // payload untouched.
  pi.on("before_provider_request", (event, ctx) => {
    if (!state.active || !isFastSupportedModel(ctx.model)) {
      return;
    }
    return applyFastServiceTier(event.payload);
  });

  // Restore persisted state on session start. The --fast flag enables
  // fast mode only when the restored state did not already have it on.
  pi.on("session_start", async (_event, ctx) => {
    state = getSavedFastModeState(ctx) ?? { active: false };

    if (pi.getFlag(FAST_FLAG) === true && !state.active) {
      await enableFastMode(ctx, { notify: true });
    }
  });
}
|
|
187
|
+
|
|
188
|
+
// Internal symbols re-exported solely for unit tests; not public API.
export const _test = {
  FAST_COMMAND,
  FAST_FLAG,
  FAST_STATE_ENTRY,
  FAST_COMMAND_ARGS,
  FAST_SERVICE_TIER,
  FAST_SUPPORTED_MODELS,
  parseFastModeState,
  isFastSupportedModel,
  describeCurrentState,
  applyFastServiceTier,
};
|
package/package.json
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@benvargas/pi-openai-fast",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "OpenAI fast mode toggle for pi - Enables priority service tier on supported GPT-5.4 models",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"pi",
|
|
7
|
+
"pi-package",
|
|
8
|
+
"pi-extension",
|
|
9
|
+
"pi-coding-agent",
|
|
10
|
+
"openai",
|
|
11
|
+
"codex",
|
|
12
|
+
"gpt-5.4",
|
|
13
|
+
"fast",
|
|
14
|
+
"priority",
|
|
15
|
+
"service-tier"
|
|
16
|
+
],
|
|
17
|
+
"type": "module",
|
|
18
|
+
"files": [
|
|
19
|
+
"extensions/",
|
|
20
|
+
"README.md",
|
|
21
|
+
"LICENSE"
|
|
22
|
+
],
|
|
23
|
+
"pi": {
|
|
24
|
+
"extensions": [
|
|
25
|
+
"./extensions/index.ts"
|
|
26
|
+
]
|
|
27
|
+
},
|
|
28
|
+
"peerDependencies": {
|
|
29
|
+
"@mariozechner/pi-coding-agent": ">=0.57.0"
|
|
30
|
+
},
|
|
31
|
+
"repository": {
|
|
32
|
+
"type": "git",
|
|
33
|
+
"url": "git+https://github.com/ben-vargas/pi-packages.git",
|
|
34
|
+
"directory": "packages/pi-openai-fast"
|
|
35
|
+
},
|
|
36
|
+
"author": "Ben Vargas",
|
|
37
|
+
"license": "MIT",
|
|
38
|
+
"bugs": {
|
|
39
|
+
"url": "https://github.com/ben-vargas/pi-packages/issues"
|
|
40
|
+
},
|
|
41
|
+
"homepage": "https://github.com/ben-vargas/pi-packages/tree/main/packages/pi-openai-fast#readme"
|
|
42
|
+
}
|