oh-my-opencode-gpt-slim 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +24 -17
- package/dist/cli/run/event-state.d.ts +2 -0
- package/dist/cli/run/output-renderer.d.ts +2 -1
- package/dist/cli/run/types.d.ts +1 -0
- package/dist/index.js +79 -30
- package/dist/shared/agent-variant.d.ts +4 -0
- package/package.json +10 -10
package/dist/cli/index.js
CHANGED
|
@@ -2145,7 +2145,7 @@ var package_default;
|
|
|
2145
2145
|
var init_package = __esm(() => {
|
|
2146
2146
|
package_default = {
|
|
2147
2147
|
name: "oh-my-opencode-gpt-slim",
|
|
2148
|
-
version: "0.1.3",
|
|
2148
|
+
version: "0.1.5",
|
|
2149
2149
|
description: "GPT-optimized lean fork of oh-my-openagent \u2014 33 hooks removed, 5 tools removed, Sisyphus prompt rewritten based on OpenAI Codex prompt.md",
|
|
2150
2150
|
main: "dist/index.js",
|
|
2151
2151
|
types: "dist/index.d.ts",
|
|
@@ -2223,15 +2223,15 @@ var init_package = __esm(() => {
|
|
|
2223
2223
|
typescript: "^5.7.3"
|
|
2224
2224
|
},
|
|
2225
2225
|
optionalDependencies: {
|
|
2226
|
-
"oh-my-opencode-gpt-slim-darwin-arm64": "0.1.3",
|
|
2227
|
-
"oh-my-opencode-gpt-slim-darwin-x64": "0.1.3",
|
|
2228
|
-
"oh-my-opencode-gpt-slim-darwin-x64-baseline": "0.1.3",
|
|
2229
|
-
"oh-my-opencode-gpt-slim-linux-arm64": "0.1.3",
|
|
2230
|
-
"oh-my-opencode-gpt-slim-linux-arm64-musl": "0.1.3",
|
|
2231
|
-
"oh-my-opencode-gpt-slim-linux-x64": "0.1.3",
|
|
2232
|
-
"oh-my-opencode-gpt-slim-linux-x64-baseline": "0.1.3",
|
|
2233
|
-
"oh-my-opencode-gpt-slim-linux-x64-musl": "0.1.3",
|
|
2234
|
-
"oh-my-opencode-gpt-slim-linux-x64-musl-baseline": "0.1.3"
|
|
2226
|
+
"oh-my-opencode-gpt-slim-darwin-arm64": "0.1.5",
|
|
2227
|
+
"oh-my-opencode-gpt-slim-darwin-x64": "0.1.5",
|
|
2228
|
+
"oh-my-opencode-gpt-slim-darwin-x64-baseline": "0.1.5",
|
|
2229
|
+
"oh-my-opencode-gpt-slim-linux-arm64": "0.1.5",
|
|
2230
|
+
"oh-my-opencode-gpt-slim-linux-arm64-musl": "0.1.5",
|
|
2231
|
+
"oh-my-opencode-gpt-slim-linux-x64": "0.1.5",
|
|
2232
|
+
"oh-my-opencode-gpt-slim-linux-x64-baseline": "0.1.5",
|
|
2233
|
+
"oh-my-opencode-gpt-slim-linux-x64-musl": "0.1.5",
|
|
2234
|
+
"oh-my-opencode-gpt-slim-linux-x64-musl-baseline": "0.1.5"
|
|
2235
2235
|
},
|
|
2236
2236
|
overrides: {
|
|
2237
2237
|
"@opencode-ai/sdk": "^1.2.17"
|
|
@@ -9686,6 +9686,7 @@ function createEventState() {
|
|
|
9686
9686
|
currentAgent: null,
|
|
9687
9687
|
currentModel: null,
|
|
9688
9688
|
currentVariant: null,
|
|
9689
|
+
currentReasoningEffort: null,
|
|
9689
9690
|
currentMessageRole: null,
|
|
9690
9691
|
agentColorsByName: {},
|
|
9691
9692
|
partTypesById: {},
|
|
@@ -9968,13 +9969,13 @@ var displayChars = {
|
|
|
9968
9969
|
|
|
9969
9970
|
// src/cli/run/output-renderer.ts
|
|
9970
9971
|
var import_picocolors6 = __toESM(require_picocolors(), 1);
|
|
9971
|
-
function renderAgentHeader(agent, model, variant, agentColorsByName) {
|
|
9972
|
+
function renderAgentHeader(agent, model, variant, reasoningEffort, agentColorsByName) {
|
|
9972
9973
|
if (!agent && !model)
|
|
9973
9974
|
return;
|
|
9974
9975
|
const agentLabel = agent ? import_picocolors6.default.bold(colorizeWithProfileColor(agent, agentColorsByName[agent])) : "";
|
|
9975
9976
|
const modelBase = model ?? "";
|
|
9976
|
-
const
|
|
9977
|
-
const modelLabel = model ? import_picocolors6.default.dim(`${modelBase}${
|
|
9977
|
+
const modelDetails = formatModelDetails(variant, reasoningEffort);
|
|
9978
|
+
const modelLabel = model ? import_picocolors6.default.dim(`${modelBase}${modelDetails}`) : "";
|
|
9978
9979
|
process.stdout.write(`
|
|
9979
9980
|
`);
|
|
9980
9981
|
if (modelLabel) {
|
|
@@ -9988,6 +9989,10 @@ function renderAgentHeader(agent, model, variant, agentColorsByName) {
|
|
|
9988
9989
|
process.stdout.write(`
|
|
9989
9990
|
`);
|
|
9990
9991
|
}
|
|
9992
|
+
function formatModelDetails(variant, reasoningEffort) {
|
|
9993
|
+
const details = [variant, reasoningEffort ? `effort: ${reasoningEffort}` : null].filter(Boolean);
|
|
9994
|
+
return details.length > 0 ? ` (${details.join(", ")})` : "";
|
|
9995
|
+
}
|
|
9991
9996
|
function openThinkBlock() {
|
|
9992
9997
|
process.stdout.write(`
|
|
9993
9998
|
${import_picocolors6.default.dim("\u2503 Thinking:")} `);
|
|
@@ -10066,9 +10071,9 @@ function renderCompletionMetaLine(state, messageID) {
|
|
|
10066
10071
|
const elapsedSec = startedAt ? ((Date.now() - startedAt) / 1000).toFixed(1) : "0.0";
|
|
10067
10072
|
const agent = state.currentAgent ?? "assistant";
|
|
10068
10073
|
const model = state.currentModel ?? "unknown-model";
|
|
10069
|
-
const
|
|
10074
|
+
const modelDetails = formatModelDetails(state.currentVariant, state.currentReasoningEffort);
|
|
10070
10075
|
process.stdout.write(import_picocolors7.default.dim(`
|
|
10071
|
-
${displayChars.treeEnd} ${agent} \xB7 ${model}${
|
|
10076
|
+
${displayChars.treeEnd} ${agent} \xB7 ${model}${modelDetails} \xB7 ${elapsedSec}s
|
|
10072
10077
|
`));
|
|
10073
10078
|
state.completionMetaPrintedByMessageId[messageID] = true;
|
|
10074
10079
|
}
|
|
@@ -10254,11 +10259,13 @@ function handleMessageUpdated(ctx, payload, state) {
|
|
|
10254
10259
|
const agent = props?.info?.agent ?? null;
|
|
10255
10260
|
const model = props?.info?.modelID ?? null;
|
|
10256
10261
|
const variant = props?.info?.variant ?? null;
|
|
10257
|
-
|
|
10262
|
+
const reasoningEffort = props?.info?.reasoningEffort ?? null;
|
|
10263
|
+
if (agent !== state.currentAgent || model !== state.currentModel || variant !== state.currentVariant || reasoningEffort !== state.currentReasoningEffort) {
|
|
10258
10264
|
state.currentAgent = agent;
|
|
10259
10265
|
state.currentModel = model;
|
|
10260
10266
|
state.currentVariant = variant;
|
|
10261
|
-
|
|
10267
|
+
state.currentReasoningEffort = reasoningEffort;
|
|
10268
|
+
renderAgentHeader(agent, model, variant, reasoningEffort, state.agentColorsByName);
|
|
10262
10269
|
}
|
|
10263
10270
|
}
|
|
10264
10271
|
function handleToolExecute(ctx, payload, state) {
|
|
@@ -17,6 +17,8 @@ export interface EventState {
|
|
|
17
17
|
currentModel: string | null;
|
|
18
18
|
/** Current model variant from the latest assistant message */
|
|
19
19
|
currentVariant: string | null;
|
|
20
|
+
/** Current model reasoning effort from the latest assistant message */
|
|
21
|
+
currentReasoningEffort: string | null;
|
|
20
22
|
/** Current message role (user/assistant) — used to filter user messages from display */
|
|
21
23
|
currentMessageRole: string | null;
|
|
22
24
|
/** Agent profile colors keyed by display name */
|
|
@@ -1,4 +1,5 @@
|
|
|
1
|
-
export declare function renderAgentHeader(agent: string | null, model: string | null, variant: string | null, agentColorsByName: Record<string, string>): void;
|
|
1
|
+
export declare function renderAgentHeader(agent: string | null, model: string | null, variant: string | null, reasoningEffort: string | null, agentColorsByName: Record<string, string>): void;
|
|
2
|
+
export declare function formatModelDetails(variant: string | null, reasoningEffort: string | null): string;
|
|
2
3
|
export declare function openThinkBlock(): void;
|
|
3
4
|
export declare function closeThinkBlock(): void;
|
|
4
5
|
export declare function writePaddedText(text: string, atLineStart: boolean): {
|
package/dist/cli/run/types.d.ts
CHANGED
package/dist/index.js
CHANGED
|
@@ -12308,7 +12308,7 @@ function initConfigContext(binary, version) {
|
|
|
12308
12308
|
// package.json
|
|
12309
12309
|
var package_default = {
|
|
12310
12310
|
name: "oh-my-opencode-gpt-slim",
|
|
12311
|
-
version: "0.1.3",
|
|
12311
|
+
version: "0.1.5",
|
|
12312
12312
|
description: "GPT-optimized lean fork of oh-my-openagent \u2014 33 hooks removed, 5 tools removed, Sisyphus prompt rewritten based on OpenAI Codex prompt.md",
|
|
12313
12313
|
main: "dist/index.js",
|
|
12314
12314
|
types: "dist/index.d.ts",
|
|
@@ -12386,15 +12386,15 @@ var package_default = {
|
|
|
12386
12386
|
typescript: "^5.7.3"
|
|
12387
12387
|
},
|
|
12388
12388
|
optionalDependencies: {
|
|
12389
|
-
"oh-my-opencode-gpt-slim-darwin-arm64": "0.1.3",
|
|
12390
|
-
"oh-my-opencode-gpt-slim-darwin-x64": "0.1.3",
|
|
12391
|
-
"oh-my-opencode-gpt-slim-darwin-x64-baseline": "0.1.3",
|
|
12392
|
-
"oh-my-opencode-gpt-slim-linux-arm64": "0.1.3",
|
|
12393
|
-
"oh-my-opencode-gpt-slim-linux-arm64-musl": "0.1.3",
|
|
12394
|
-
"oh-my-opencode-gpt-slim-linux-x64": "0.1.3",
|
|
12395
|
-
"oh-my-opencode-gpt-slim-linux-x64-baseline": "0.1.3",
|
|
12396
|
-
"oh-my-opencode-gpt-slim-linux-x64-musl": "0.1.3",
|
|
12397
|
-
"oh-my-opencode-gpt-slim-linux-x64-musl-baseline": "0.1.3"
|
|
12389
|
+
"oh-my-opencode-gpt-slim-darwin-arm64": "0.1.5",
|
|
12390
|
+
"oh-my-opencode-gpt-slim-darwin-x64": "0.1.5",
|
|
12391
|
+
"oh-my-opencode-gpt-slim-darwin-x64-baseline": "0.1.5",
|
|
12392
|
+
"oh-my-opencode-gpt-slim-linux-arm64": "0.1.5",
|
|
12393
|
+
"oh-my-opencode-gpt-slim-linux-arm64-musl": "0.1.5",
|
|
12394
|
+
"oh-my-opencode-gpt-slim-linux-x64": "0.1.5",
|
|
12395
|
+
"oh-my-opencode-gpt-slim-linux-x64-baseline": "0.1.5",
|
|
12396
|
+
"oh-my-opencode-gpt-slim-linux-x64-musl": "0.1.5",
|
|
12397
|
+
"oh-my-opencode-gpt-slim-linux-x64-musl-baseline": "0.1.5"
|
|
12398
12398
|
},
|
|
12399
12399
|
overrides: {
|
|
12400
12400
|
"@opencode-ai/sdk": "^1.2.17"
|
|
@@ -17144,6 +17144,32 @@ var CATEGORY_MODEL_REQUIREMENTS = {
|
|
|
17144
17144
|
]
|
|
17145
17145
|
}
|
|
17146
17146
|
};
|
|
17147
|
+
|
|
17148
|
+
// src/shared/agent-variant.ts
|
|
17149
|
+
function resolveAgentReasoningEffort(config, agentName) {
|
|
17150
|
+
if (!agentName) {
|
|
17151
|
+
return;
|
|
17152
|
+
}
|
|
17153
|
+
const agentOverrides = config.agents;
|
|
17154
|
+
const agentOverride = agentOverrides ? agentOverrides[agentName] ?? Object.entries(agentOverrides).find(([key]) => key.toLowerCase() === agentName.toLowerCase())?.[1] : undefined;
|
|
17155
|
+
if (!agentOverride) {
|
|
17156
|
+
return;
|
|
17157
|
+
}
|
|
17158
|
+
if (agentOverride.reasoningEffort) {
|
|
17159
|
+
return agentOverride.reasoningEffort;
|
|
17160
|
+
}
|
|
17161
|
+
const categoryName = agentOverride.category;
|
|
17162
|
+
if (!categoryName) {
|
|
17163
|
+
return;
|
|
17164
|
+
}
|
|
17165
|
+
return config.categories?.[categoryName]?.reasoningEffort;
|
|
17166
|
+
}
|
|
17167
|
+
function applyAgentReasoningEffort(config, agentName, message) {
|
|
17168
|
+
const reasoningEffort = resolveAgentReasoningEffort(config, agentName);
|
|
17169
|
+
if (reasoningEffort !== undefined && message.reasoningEffort === undefined) {
|
|
17170
|
+
message.reasoningEffort = reasoningEffort;
|
|
17171
|
+
}
|
|
17172
|
+
}
|
|
17147
17173
|
// src/shared/session-cursor.ts
|
|
17148
17174
|
var sessionCursors = new Map;
|
|
17149
17175
|
function buildMessageKey(message, index) {
|
|
@@ -77192,6 +77218,7 @@ function acquireLock(dirPath) {
|
|
|
77192
77218
|
|
|
77193
77219
|
// src/tools/task/todo-sync.ts
|
|
77194
77220
|
init_logger();
|
|
77221
|
+
var sessionTodoSyncQueue = new Map;
|
|
77195
77222
|
function mapTaskStatusToTodoStatus(taskStatus) {
|
|
77196
77223
|
switch (taskStatus) {
|
|
77197
77224
|
case "pending":
|
|
@@ -77256,32 +77283,53 @@ function extractTodos(response) {
|
|
|
77256
77283
|
}
|
|
77257
77284
|
return [];
|
|
77258
77285
|
}
|
|
77286
|
+
async function enqueueTodoSync(sessionID, operation) {
|
|
77287
|
+
const previous = sessionTodoSyncQueue.get(sessionID) ?? Promise.resolve();
|
|
77288
|
+
const next = previous.catch(() => {
|
|
77289
|
+
return;
|
|
77290
|
+
}).then(operation);
|
|
77291
|
+
const tracked = next.then(() => {
|
|
77292
|
+
return;
|
|
77293
|
+
}, () => {
|
|
77294
|
+
return;
|
|
77295
|
+
});
|
|
77296
|
+
sessionTodoSyncQueue.set(sessionID, tracked);
|
|
77297
|
+
try {
|
|
77298
|
+
return await next;
|
|
77299
|
+
} finally {
|
|
77300
|
+
if (sessionTodoSyncQueue.get(sessionID) === tracked) {
|
|
77301
|
+
sessionTodoSyncQueue.delete(sessionID);
|
|
77302
|
+
}
|
|
77303
|
+
}
|
|
77304
|
+
}
|
|
77259
77305
|
async function syncTaskTodoUpdate(ctx, task, sessionID, writer) {
|
|
77260
77306
|
if (!ctx)
|
|
77261
77307
|
return;
|
|
77262
77308
|
try {
|
|
77263
|
-
|
|
77264
|
-
|
|
77265
|
-
|
|
77266
|
-
|
|
77267
|
-
|
|
77268
|
-
|
|
77269
|
-
|
|
77270
|
-
|
|
77271
|
-
|
|
77272
|
-
|
|
77273
|
-
|
|
77309
|
+
await enqueueTodoSync(sessionID, async () => {
|
|
77310
|
+
const response = await ctx.client.session.todo({
|
|
77311
|
+
path: { id: sessionID }
|
|
77312
|
+
});
|
|
77313
|
+
const currentTodos = extractTodos(response);
|
|
77314
|
+
const taskTodo = syncTaskToTodo(task);
|
|
77315
|
+
const nextTodos = currentTodos.filter((todo2) => {
|
|
77316
|
+
if (taskTodo) {
|
|
77317
|
+
return !todosMatch(todo2, taskTodo);
|
|
77318
|
+
}
|
|
77319
|
+
if (todo2.id) {
|
|
77320
|
+
return todo2.id !== task.id;
|
|
77321
|
+
}
|
|
77322
|
+
return todo2.content !== task.subject;
|
|
77323
|
+
});
|
|
77324
|
+
const todo = taskTodo;
|
|
77325
|
+
if (todo) {
|
|
77326
|
+
nextTodos.push(todo);
|
|
77274
77327
|
}
|
|
77275
|
-
|
|
77328
|
+
const resolvedWriter = writer ?? await resolveTodoWriter();
|
|
77329
|
+
if (!resolvedWriter)
|
|
77330
|
+
return;
|
|
77331
|
+
await resolvedWriter({ sessionID, todos: nextTodos });
|
|
77276
77332
|
});
|
|
77277
|
-
const todo = taskTodo;
|
|
77278
|
-
if (todo) {
|
|
77279
|
-
nextTodos.push(todo);
|
|
77280
|
-
}
|
|
77281
|
-
const resolvedWriter = writer ?? await resolveTodoWriter();
|
|
77282
|
-
if (!resolvedWriter)
|
|
77283
|
-
return;
|
|
77284
|
-
await resolvedWriter({ sessionID, todos: nextTodos });
|
|
77285
77333
|
} catch (err) {
|
|
77286
77334
|
log("[todo-sync] Failed to sync task todo", {
|
|
77287
77335
|
error: String(err),
|
|
@@ -79103,6 +79151,7 @@ function createChatMessageHandler2(args) {
|
|
|
79103
79151
|
if (firstMessageVariantGate.shouldOverride(input.sessionID)) {
|
|
79104
79152
|
firstMessageVariantGate.markApplied(input.sessionID);
|
|
79105
79153
|
}
|
|
79154
|
+
applyAgentReasoningEffort(pluginConfig, input.agent, output.message);
|
|
79106
79155
|
if (!isRuntimeFallbackEnabled) {
|
|
79107
79156
|
await hooks2.modelFallback?.["chat.message"]?.(input, output);
|
|
79108
79157
|
}
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import type { OhMyOpenCodeConfig } from "../config";
|
|
2
2
|
export declare function resolveAgentVariant(config: OhMyOpenCodeConfig, agentName?: string): string | undefined;
|
|
3
|
+
export declare function resolveAgentReasoningEffort(config: OhMyOpenCodeConfig, agentName?: string): string | undefined;
|
|
3
4
|
export declare function resolveVariantForModel(config: OhMyOpenCodeConfig, agentName: string, currentModel: {
|
|
4
5
|
providerID: string;
|
|
5
6
|
modelID: string;
|
|
@@ -7,3 +8,6 @@ export declare function resolveVariantForModel(config: OhMyOpenCodeConfig, agent
|
|
|
7
8
|
export declare function applyAgentVariant(config: OhMyOpenCodeConfig, agentName: string | undefined, message: {
|
|
8
9
|
variant?: string;
|
|
9
10
|
}): void;
|
|
11
|
+
export declare function applyAgentReasoningEffort(config: OhMyOpenCodeConfig, agentName: string | undefined, message: {
|
|
12
|
+
reasoningEffort?: string;
|
|
13
|
+
}): void;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "oh-my-opencode-gpt-slim",
|
|
3
|
-
"version": "0.1.3",
|
|
3
|
+
"version": "0.1.5",
|
|
4
4
|
"description": "GPT-optimized lean fork of oh-my-openagent — 33 hooks removed, 5 tools removed, Sisyphus prompt rewritten based on OpenAI Codex prompt.md",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -78,15 +78,15 @@
|
|
|
78
78
|
"typescript": "^5.7.3"
|
|
79
79
|
},
|
|
80
80
|
"optionalDependencies": {
|
|
81
|
-
"oh-my-opencode-gpt-slim-darwin-arm64": "0.1.3",
|
|
82
|
-
"oh-my-opencode-gpt-slim-darwin-x64": "0.1.3",
|
|
83
|
-
"oh-my-opencode-gpt-slim-darwin-x64-baseline": "0.1.3",
|
|
84
|
-
"oh-my-opencode-gpt-slim-linux-arm64": "0.1.3",
|
|
85
|
-
"oh-my-opencode-gpt-slim-linux-arm64-musl": "0.1.3",
|
|
86
|
-
"oh-my-opencode-gpt-slim-linux-x64": "0.1.3",
|
|
87
|
-
"oh-my-opencode-gpt-slim-linux-x64-baseline": "0.1.3",
|
|
88
|
-
"oh-my-opencode-gpt-slim-linux-x64-musl": "0.1.3",
|
|
89
|
-
"oh-my-opencode-gpt-slim-linux-x64-musl-baseline": "0.1.3"
|
|
81
|
+
"oh-my-opencode-gpt-slim-darwin-arm64": "0.1.5",
|
|
82
|
+
"oh-my-opencode-gpt-slim-darwin-x64": "0.1.5",
|
|
83
|
+
"oh-my-opencode-gpt-slim-darwin-x64-baseline": "0.1.5",
|
|
84
|
+
"oh-my-opencode-gpt-slim-linux-arm64": "0.1.5",
|
|
85
|
+
"oh-my-opencode-gpt-slim-linux-arm64-musl": "0.1.5",
|
|
86
|
+
"oh-my-opencode-gpt-slim-linux-x64": "0.1.5",
|
|
87
|
+
"oh-my-opencode-gpt-slim-linux-x64-baseline": "0.1.5",
|
|
88
|
+
"oh-my-opencode-gpt-slim-linux-x64-musl": "0.1.5",
|
|
89
|
+
"oh-my-opencode-gpt-slim-linux-x64-musl-baseline": "0.1.5"
|
|
90
90
|
},
|
|
91
91
|
"overrides": {
|
|
92
92
|
"@opencode-ai/sdk": "^1.2.17"
|