@pushpalsdev/cli 1.0.77 → 1.0.79
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1736,6 +1736,7 @@ var DEFAULT_OLLAMA_ENDPOINT = "http://127.0.0.1:11434/api/chat";
|
|
|
1736
1736
|
// Default endpoint for OpenAI-compatible chat completions.
var DEFAULT_OPENAI_ENDPOINT = "https://api.openai.com/v1/chat/completions";
// Generic placeholder model id used when no model is configured.
var DEFAULT_MODEL = "local-model";
// Preferred model for the Codex CLI backend.
var DEFAULT_CODEX_MODEL = "gpt-5.5";
// Older model retried once when an outdated Codex CLI rejects the default model.
var LEGACY_CODEX_MODEL_FALLBACK = "gpt-5.4";
// Default value passed to Codex as `model_reasoning_effort`.
var DEFAULT_CODEX_REASONING_EFFORT = "xhigh";
// Per-request timeout for Codex CLI invocations (2 minutes).
var DEFAULT_CODEX_TIMEOUT_MS = 120000;
// Context-window size assumed for LM Studio models when not reported.
var DEFAULT_LMSTUDIO_CONTEXT_WINDOW = 4096;
|
|
@@ -1860,6 +1861,55 @@ function codexReasoningEffort(configured, model) {
|
|
|
1860
1861
|
}
|
|
1861
1862
|
return defaultEffort;
|
|
1862
1863
|
}
|
|
1864
|
+
/**
 * Whether `command` is one of the stock Codex launchers (or unset), meaning
 * the resolver is free to prefer the newest compatible Codex installation.
 * @param {string[]} command - Launcher argv parts as configured.
 * @returns {boolean}
 */
function isDefaultCodexLauncher(command) {
  // Normalize: trim, lowercase, and drop empty parts before comparing.
  const normalized = command.map((part) => part.trim().toLowerCase()).filter(Boolean);
  if (normalized.length === 0)
    return true;
  // NUL cannot appear inside an argv part, so joining on it yields a
  // collision-free key for whole-array comparison.
  const key = normalized.join("\x00");
  return key === ["bun", "x", "--yes", "@openai/codex"].join("\x00") || key === ["bunx", "--yes", "@openai/codex"].join("\x00");
}
|
|
1868
|
+
/**
 * Parse a `codex --version` banner into its semver components.
 * Accepts prefixes like "codex", "codex-cli", or "openai codex", an optional
 * leading "v", and an optional prerelease tag (e.g. "1.2.3-alpha.1").
 * @param {string} text - First line of the CLI's version output.
 * @returns {{major: number, minor: number, patch: number, prerelease: string} | null}
 *   Parsed version, or null when no x.y.z number is found.
 */
function parseCodexCliVersion(text) {
  const match = text.match(/(?:codex(?:-cli)?|openai\s+codex)?\s*v?(\d+)\.(\d+)\.(\d+)(?:-([0-9a-z.-]+))?/i);
  if (!match)
    return null;
  return {
    major: Number.parseInt(match[1], 10),
    minor: Number.parseInt(match[2], 10),
    patch: Number.parseInt(match[3], 10),
    // Empty string (not undefined) marks a plain release.
    prerelease: match[4] ?? ""
  };
}
|
|
1879
|
+
/**
 * Compare two parsed Codex CLI versions.
 * A known version always outranks an unknown (null) one; otherwise compare
 * major/minor/patch numerically, then prerelease tags.
 * @param {{major:number,minor:number,patch:number,prerelease:string}|null} a
 * @param {{major:number,minor:number,patch:number,prerelease:string}|null} b
 * @returns {number} Positive if a > b, negative if a < b, 0 when equal or
 *   both unknown.
 */
function compareCodexVersions(a, b) {
  if (a && !b)
    return 1;
  if (!a && b)
    return -1;
  if (!a || !b)
    return 0;
  for (const key of ["major", "minor", "patch"]) {
    if (a[key] !== b[key])
      return a[key] > b[key] ? 1 : -1;
  }
  if (a.prerelease === b.prerelease)
    return 0;
  // Semver rule: a plain release outranks any prerelease of the same x.y.z.
  if (!a.prerelease)
    return 1;
  if (!b.prerelease)
    return -1;
  // NOTE(review): lexicographic compare — "alpha.10" sorts before "alpha.2",
  // unlike strict semver numeric-identifier ordering.
  return a.prerelease.localeCompare(b.prerelease);
}
|
|
1898
|
+
/**
 * Pick which successful `--version` probe to use as the Codex command.
 * @param {{command: string[], version: object|null, versionText: string}[]} probes
 *   Probes in candidate-priority order.
 * @param {{preferNewestCompatible: boolean}} opts - When true, pick the probe
 *   with the highest parsed CLI version; otherwise honor probe order.
 * @returns {object|null} Selected probe, or null when none succeeded.
 */
function chooseCodexCommandProbe(probes, opts) {
  if (probes.length === 0)
    return null;
  if (!opts.preferNewestCompatible)
    return probes[0];
  // Ties keep the earlier (higher-priority) probe.
  return probes.reduce((best, probe) => compareCodexVersions(probe.version, best.version) > 0 ? probe : best);
}
|
|
1905
|
+
/**
 * Detect the Codex CLI error emitted when the requested model needs a newer
 * CLI release than the one installed.
 * @param {string} stdout - Captured standard output of the failed run.
 * @param {string} stderr - Captured standard error of the failed run.
 * @returns {boolean} True when either stream matches a known "upgrade Codex"
 *   message.
 */
function requiresNewerCodexForModel(stdout, stderr) {
  // Search both streams together, case-insensitively.
  const combined = `${stdout}\n${stderr}`.toLowerCase();
  return combined.includes("requires a newer version of codex") || (combined.includes("requires newer") && combined.includes("codex"));
}
|
|
1910
|
+
/**
 * Whether `model` is the stock default Codex model, comparing
 * case-insensitively and ignoring surrounding whitespace.
 * @param {string} model - Model id to check.
 * @returns {boolean}
 */
function isDefaultCodexModel(model) {
  return model.trim().toLowerCase() === DEFAULT_CODEX_MODEL.toLowerCase();
}
|
|
1863
1913
|
function normalizeCodexModel(rawModel) {
|
|
1864
1914
|
const model = rawModel.trim();
|
|
1865
1915
|
if (!model)
|
|
@@ -1888,6 +1938,63 @@ function normalizeOpenAiBaseFromEndpoint(rawEndpoint) {
|
|
|
1888
1938
|
return trimmed;
|
|
1889
1939
|
}
|
|
1890
1940
|
/**
 * Run an external command, preferring Bun's native spawner when executing
 * under Bun and falling back to Node's child_process otherwise.
 * @param {string[]} command - argv, program first.
 * @param {{cwd?:string, env?:object, stdin?:string, timeoutMs?:number}} opts
 * @returns {Promise<{code:number|null, signal:string|null, stdout:string, stderr:string, timedOut:boolean}>}
 */
async function runProcess(command, opts) {
  // Feature-detect the runtime rather than trusting env vars: only use
  // Bun.spawn when it is actually available.
  const bunRuntime = globalThis.Bun;
  if (typeof bunRuntime?.spawn === "function") {
    return runProcessWithBun(command, opts);
  }
  return runProcessWithNode(command, opts);
}
|
|
1947
|
+
/**
 * Execute `command` via Bun.spawn, capturing stdout/stderr as text.
 * Enforces `opts.timeoutMs` (0 or absent = no timeout) by sending SIGTERM,
 * escalating to SIGKILL one second later if the process lingers.
 * @param {string[]} command - argv, program first.
 * @param {{cwd?:string, env?:object, stdin?:string, timeoutMs?:number}} opts
 * @returns {Promise<{code:number, signal:null, stdout:string, stderr:string, timedOut:boolean}>}
 */
async function runProcessWithBun(command, opts) {
  const bunRuntime = globalThis.Bun;
  const timeoutMs = opts.timeoutMs ?? 0;
  let timedOut = false;
  let timeout = null;
  let killTimeout = null;
  const proc = bunRuntime.spawn(command, {
    cwd: opts.cwd,
    env: opts.env,
    stdin: "pipe",
    stdout: "pipe",
    stderr: "pipe"
  });
  // Start draining both pipes immediately so the child cannot deadlock on a
  // full stdout/stderr buffer before we await its exit.
  const stdoutPromise = new Response(proc.stdout).text();
  const stderrPromise = new Response(proc.stderr).text();
  if (timeoutMs > 0) {
    timeout = setTimeout(() => {
      timedOut = true;
      try {
        proc.kill("SIGTERM");
      } catch {}
      // Escalate if SIGTERM is ignored.
      killTimeout = setTimeout(() => {
        try {
          proc.kill("SIGKILL");
        } catch {}
      }, 1000);
      // Don't let the escalation timer keep the event loop alive.
      killTimeout.unref?.();
    }, timeoutMs);
  }
  try {
    if (typeof opts.stdin === "string") {
      proc.stdin?.write(opts.stdin);
    }
    // Always close stdin so commands that read until EOF can finish.
    proc.stdin?.end();
    const code = await proc.exited;
    const [stdout, stderr] = await Promise.all([stdoutPromise, stderrPromise]);
    return {
      code,
      signal: null,
      stdout,
      stderr,
      timedOut
    };
  } finally {
    if (timeout)
      clearTimeout(timeout);
    if (killTimeout)
      clearTimeout(killTimeout);
  }
}
|
|
1997
|
+
async function runProcessWithNode(command, opts) {
|
|
1891
1998
|
const timeoutMs = opts.timeoutMs ?? 0;
|
|
1892
1999
|
return new Promise((resolve4, reject) => {
|
|
1893
2000
|
const child = spawn(command[0], command.slice(1), {
|
|
@@ -1946,6 +2053,10 @@ async function runProcess(command, opts) {
|
|
|
1946
2053
|
});
|
|
1947
2054
|
}
|
|
1948
2055
|
var cachedCodexCommandPrefix = new Map;
|
|
2056
|
+
/**
 * Build a Codex launcher command from the PUSHPALS_BUN_BIN environment
 * override.
 * @param {object} env - Environment map (e.g. process.env).
 * @returns {string[]} `[bunBin, "x", "--yes", "@openai/codex"]`, or an empty
 *   array when the override is unset or blank.
 */
function bunCodexCommandFromEnv(env) {
  const bunBin = (env.PUSHPALS_BUN_BIN ?? "").trim();
  return bunBin ? [bunBin, "x", "--yes", "@openai/codex"] : [];
}
|
|
1949
2060
|
async function resolveCodexCommandPrefix(configuredCommand) {
|
|
1950
2061
|
const override = codexCommandOverrideParts(configuredCommand);
|
|
1951
2062
|
const cacheKey = override.join("\x00");
|
|
@@ -1953,6 +2064,7 @@ async function resolveCodexCommandPrefix(configuredCommand) {
|
|
|
1953
2064
|
if (cached)
|
|
1954
2065
|
return cached;
|
|
1955
2066
|
const preferred = override.length > 0 ? override : ["bun", "x", "--yes", "@openai/codex"];
|
|
2067
|
+
const preferNewestCompatible = isDefaultCodexLauncher(preferred);
|
|
1956
2068
|
const candidates = [];
|
|
1957
2069
|
const pushCandidate = (cmd) => {
|
|
1958
2070
|
if (cmd.length === 0)
|
|
@@ -1963,6 +2075,7 @@ async function resolveCodexCommandPrefix(configuredCommand) {
|
|
|
1963
2075
|
candidates.push(cmd);
|
|
1964
2076
|
};
|
|
1965
2077
|
pushCandidate(preferred);
|
|
2078
|
+
pushCandidate(bunCodexCommandFromEnv(process.env));
|
|
1966
2079
|
const execPath = (process.execPath ?? "").trim();
|
|
1967
2080
|
if (execPath) {
|
|
1968
2081
|
const lower = execPath.toLowerCase();
|
|
@@ -1976,6 +2089,7 @@ async function resolveCodexCommandPrefix(configuredCommand) {
|
|
|
1976
2089
|
const cwd = process.cwd();
|
|
1977
2090
|
const env = process.env;
|
|
1978
2091
|
const attemptErrors = [];
|
|
2092
|
+
const successfulProbes = [];
|
|
1979
2093
|
for (const candidate of candidates) {
|
|
1980
2094
|
if (candidate.length === 0)
|
|
1981
2095
|
continue;
|
|
@@ -1987,8 +2101,15 @@ async function resolveCodexCommandPrefix(configuredCommand) {
|
|
|
1987
2101
|
timeoutMs: 15000
|
|
1988
2102
|
});
|
|
1989
2103
|
if (probe.code === 0) {
|
|
1990
|
-
|
|
1991
|
-
|
|
2104
|
+
const versionText = (probe.stdout || probe.stderr || "").trim().split(/\r?\n/, 1)[0] ?? "";
|
|
2105
|
+
successfulProbes.push({
|
|
2106
|
+
command: candidate,
|
|
2107
|
+
version: parseCodexCliVersion(versionText),
|
|
2108
|
+
versionText
|
|
2109
|
+
});
|
|
2110
|
+
if (!preferNewestCompatible)
|
|
2111
|
+
break;
|
|
2112
|
+
continue;
|
|
1992
2113
|
}
|
|
1993
2114
|
const detail = (probe.stderr || probe.stdout || "").trim();
|
|
1994
2115
|
attemptErrors.push(`${rendered} -> exit ${probe.code ?? "unknown"}${detail ? ` (${detail.split(/\r?\n/, 1)[0]})` : ""}`);
|
|
@@ -1996,6 +2117,12 @@ async function resolveCodexCommandPrefix(configuredCommand) {
|
|
|
1996
2117
|
attemptErrors.push(`${rendered} -> ${String(err)}`);
|
|
1997
2118
|
}
|
|
1998
2119
|
}
|
|
2120
|
+
const selected = chooseCodexCommandProbe(successfulProbes, { preferNewestCompatible });
|
|
2121
|
+
if (selected) {
|
|
2122
|
+
cachedCodexCommandPrefix.set(cacheKey, selected.command);
|
|
2123
|
+
console.log(`[LLM] Resolved Codex CLI command: ${selected.command.join(" ")}${selected.versionText ? ` (${selected.versionText})` : ""}.`);
|
|
2124
|
+
return selected.command;
|
|
2125
|
+
}
|
|
1999
2126
|
const details = attemptErrors.length > 0 ? ` Tried: ${attemptErrors.join("; ")}` : "";
|
|
2000
2127
|
throw new Error("OpenAI Codex CLI is unavailable. Install/use Codex CLI (`bun x --yes @openai/codex` or `codex`) and retry." + details);
|
|
2001
2128
|
}
|
|
@@ -2742,7 +2869,7 @@ class OpenAiCodexCliClient {
|
|
|
2742
2869
|
service: this.service,
|
|
2743
2870
|
sessionId: this.sessionTag || undefined,
|
|
2744
2871
|
backend: "openai_codex",
|
|
2745
|
-
modelId: this.model,
|
|
2872
|
+
modelId: usage.modelId ?? this.model,
|
|
2746
2873
|
promptTokens: usage.promptTokens,
|
|
2747
2874
|
completionTokens: usage.completionTokens,
|
|
2748
2875
|
totalTokens: usage.promptTokens + usage.completionTokens,
|
|
@@ -2788,6 +2915,13 @@ class OpenAiCodexCliClient {
|
|
|
2788
2915
|
}
|
|
2789
2916
|
}
|
|
2790
2917
|
async runCodexExec(prompt) {
|
|
2918
|
+
return this.runCodexExecAttempt(prompt, {
|
|
2919
|
+
model: this.model,
|
|
2920
|
+
modelCompatibilityRecoveryAttempt: 0
|
|
2921
|
+
});
|
|
2922
|
+
}
|
|
2923
|
+
async runCodexExecAttempt(prompt, opts) {
|
|
2924
|
+
const model = normalizeCodexModel(opts.model);
|
|
2791
2925
|
const commandPrefix = await resolveCodexCommandPrefix(this.codexBin);
|
|
2792
2926
|
const env = { ...process.env };
|
|
2793
2927
|
env.PYTHONIOENCODING = "utf-8";
|
|
@@ -2821,7 +2955,7 @@ class OpenAiCodexCliClient {
|
|
|
2821
2955
|
const command = [
|
|
2822
2956
|
...commandPrefix,
|
|
2823
2957
|
"-c",
|
|
2824
|
-
`model_reasoning_effort="${codexReasoningEffort(this.reasoningEffort,
|
|
2958
|
+
`model_reasoning_effort="${codexReasoningEffort(this.reasoningEffort, model)}"`,
|
|
2825
2959
|
"-a",
|
|
2826
2960
|
"never",
|
|
2827
2961
|
"-s",
|
|
@@ -2832,8 +2966,8 @@ class OpenAiCodexCliClient {
|
|
|
2832
2966
|
"--output-last-message",
|
|
2833
2967
|
lastMessagePath
|
|
2834
2968
|
];
|
|
2835
|
-
if (
|
|
2836
|
-
command.push("-m",
|
|
2969
|
+
if (model) {
|
|
2970
|
+
command.push("-m", model);
|
|
2837
2971
|
}
|
|
2838
2972
|
command.push("-");
|
|
2839
2973
|
const result = await runProcess(command, {
|
|
@@ -2850,13 +2984,20 @@ class OpenAiCodexCliClient {
|
|
|
2850
2984
|
const lastMessage = existsSync3(lastMessagePath) ? readFileSync4(lastMessagePath, "utf8").trim() : "";
|
|
2851
2985
|
if (result.code !== 0) {
|
|
2852
2986
|
const detail = stderr || stdout || "codex exec exited with non-zero status";
|
|
2987
|
+
if (opts.modelCompatibilityRecoveryAttempt < 1 && isDefaultCodexModel(model) && LEGACY_CODEX_MODEL_FALLBACK.trim().toLowerCase() !== DEFAULT_CODEX_MODEL.toLowerCase() && requiresNewerCodexForModel(stdout, stderr)) {
|
|
2988
|
+
console.warn(`[LLM] Codex CLI rejected default model ${DEFAULT_CODEX_MODEL}; retrying once with ${LEGACY_CODEX_MODEL_FALLBACK}. Upgrade Codex CLI to use ${DEFAULT_CODEX_MODEL}.`);
|
|
2989
|
+
return this.runCodexExecAttempt(prompt, {
|
|
2990
|
+
model: LEGACY_CODEX_MODEL_FALLBACK,
|
|
2991
|
+
modelCompatibilityRecoveryAttempt: opts.modelCompatibilityRecoveryAttempt + 1
|
|
2992
|
+
});
|
|
2993
|
+
}
|
|
2853
2994
|
throw new Error(`Codex CLI request failed (exit ${result.code ?? "unknown"}): ${detail}`);
|
|
2854
2995
|
}
|
|
2855
2996
|
const text = lastMessage || stdout;
|
|
2856
2997
|
if (!text) {
|
|
2857
2998
|
throw new Error("Codex CLI completed without producing a response.");
|
|
2858
2999
|
}
|
|
2859
|
-
return { text, stderr };
|
|
3000
|
+
return { text, stderr, model };
|
|
2860
3001
|
} finally {
|
|
2861
3002
|
rmSync(tmp, { recursive: true, force: true });
|
|
2862
3003
|
}
|
|
@@ -2874,7 +3015,7 @@ class OpenAiCodexCliClient {
|
|
|
2874
3015
|
promptTokens: estimateTokensFromText(prompt),
|
|
2875
3016
|
completionTokens: estimateTokensFromText(result.text)
|
|
2876
3017
|
});
|
|
2877
|
-
await this.maybeReportUsage(usage);
|
|
3018
|
+
await this.maybeReportUsage({ ...usage, modelId: result.model });
|
|
2878
3019
|
return {
|
|
2879
3020
|
text: result.text,
|
|
2880
3021
|
usage: {
|
|
@@ -4355,6 +4496,12 @@ function asNumber(value, fallback = 0) {
|
|
|
4355
4496
|
const n = Number(value);
|
|
4356
4497
|
return Number.isFinite(n) ? n : fallback;
|
|
4357
4498
|
}
|
|
4499
|
+
/**
 * Collapse all whitespace runs in a status-detail string to single spaces
 * and cap its length.
 * @param {string} value - Raw detail text (may contain newlines/tabs).
 * @param {number} [max=240] - Maximum length of the returned string.
 * @returns {string} Single-line detail; truncated with a trailing "..." when
 *   longer than `max`.
 */
function compactStatusDetail(value, max = 240) {
  const normalized = value.replace(/\s+/g, " ").trim();
  if (normalized.length <= max)
    return normalized;
  // Reserve three characters for the ellipsis marker.
  return `${normalized.slice(0, Math.max(0, max - 3))}...`;
}
|
|
4358
4505
|
function uniqueLowercaseTokens(values, max = 24) {
|
|
4359
4506
|
const out = [];
|
|
4360
4507
|
const seen = new Set;
|
|
@@ -5914,6 +6061,9 @@ class RemoteBuddyAutonomousEngine {
|
|
|
5914
6061
|
const maxPhaseTimeoutMs = Math.max(this.phaseTimeoutMs("ideation"), this.phaseTimeoutMs("scoring"), this.phaseTimeoutMs("planning"));
|
|
5915
6062
|
return Math.max(this.cfg.tickIntervalMs * 3, this.cfg.ideationBudgetMs * 2 + maxPhaseTimeoutMs * 6, 30000);
|
|
5916
6063
|
}
|
|
6064
|
+
lockStaleAfterMs() {
|
|
6065
|
+
return Math.max(this.phaseTimeoutMs("ideation") + 30000, this.cfg.heartbeatLogMs * 2, 120000);
|
|
6066
|
+
}
|
|
5917
6067
|
cycleBudgetMs() {
|
|
5918
6068
|
const ideationTimeoutMs = this.phaseTimeoutMs("ideation");
|
|
5919
6069
|
const scoringTimeoutMs = this.phaseTimeoutMs("scoring");
|
|
@@ -6224,10 +6374,15 @@ class RemoteBuddyAutonomousEngine {
|
|
|
6224
6374
|
body: JSON.stringify({
|
|
6225
6375
|
sessionId: this.sessionId,
|
|
6226
6376
|
runId,
|
|
6227
|
-
ttlMs
|
|
6377
|
+
ttlMs,
|
|
6378
|
+
staleAfterMs: this.lockStaleAfterMs()
|
|
6228
6379
|
})
|
|
6229
6380
|
});
|
|
6230
|
-
|
|
6381
|
+
if (res.ok)
|
|
6382
|
+
return { ok: true };
|
|
6383
|
+
const payload = await res.json().catch(() => ({}));
|
|
6384
|
+
const reason = asString2(payload.reason ?? payload.message);
|
|
6385
|
+
return { ok: false, reason };
|
|
6231
6386
|
}
|
|
6232
6387
|
async renewDispatchLock(runId) {
|
|
6233
6388
|
const res = await fetch(`${this.server}/autonomy/lock/renew`, {
|
|
@@ -6530,9 +6685,10 @@ ${JSON.stringify(input.messages ?? [])}`),
|
|
|
6530
6685
|
let outcomeDetail = "not_dispatched";
|
|
6531
6686
|
try {
|
|
6532
6687
|
this.setPhase("acquire_lock");
|
|
6533
|
-
|
|
6688
|
+
const lockResult = await this.acquireDispatchLock(runId);
|
|
6689
|
+
lockAcquired = lockResult.ok;
|
|
6534
6690
|
if (!lockAcquired) {
|
|
6535
|
-
outcomeDetail = "lock_not_acquired";
|
|
6691
|
+
outcomeDetail = lockResult.reason ? compactStatusDetail(`lock_not_acquired:${lockResult.reason}`) : "lock_not_acquired";
|
|
6536
6692
|
return;
|
|
6537
6693
|
}
|
|
6538
6694
|
this.setPhase("prepare_worktree");
|