open-agents-ai 0.187.311 → 0.187.313
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +119 -8
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -299369,15 +299369,33 @@ The session corrections MUST become hard rules in the SKILL.md Rules section.`;
|
|
|
299369
299369
|
label: `${t2.enabled ? "●" : "○"} ${t2.name || "(task)"} ${t2.schedule ? "[" + t2.schedule + "]" : ""}`,
|
|
299370
299370
|
detail: `${t2.file}#${t2.index}`
|
|
299371
299371
|
}));
|
|
299372
|
-
items.push({ key: "__kill__", label: "Kill OA schedulers", detail: "Stop scheduler/nexus
|
|
299372
|
+
items.push({ key: "__kill__", label: "Kill OA schedulers + active runs", detail: "Stop scheduler/nexus processes and terminate active OA runs" });
|
|
299373
299373
|
const result = await tuiSelect({
|
|
299374
299374
|
items,
|
|
299375
299375
|
title: "Scheduled Tasks",
|
|
299376
299376
|
onEnter: (item, { done }) => {
|
|
299377
299377
|
(async () => {
|
|
299378
299378
|
if (item.key === "__kill__") {
|
|
299379
|
-
await doFetch("/v1/scheduled/kill", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({}) });
|
|
299380
|
-
|
|
299379
|
+
const resp = await doFetch("/v1/scheduled/kill", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({}) });
|
|
299380
|
+
try {
|
|
299381
|
+
const j = await resp.json();
|
|
299382
|
+
const kb = Array.isArray(j.killed) ? j.killed.length : 0;
|
|
299383
|
+
const ka = Array.isArray(j.additionally) ? j.additionally.length : 0;
|
|
299384
|
+
renderInfo2(`Killed ${kb + ka} processes (schedulers + active runs).`);
|
|
299385
|
+
const before = j.gpu_before?.[0];
|
|
299386
|
+
const after = j.gpu_after?.[0];
|
|
299387
|
+
if (before && after) {
|
|
299388
|
+
renderInfo2(`GPU util: ${before.gpu_pct}% → ${after.gpu_pct}%, VRAM: ${before.vram_used_gb}/${before.vram_total_gb} GB → ${after.vram_used_gb}/${after.vram_total_gb} GB`);
|
|
299389
|
+
}
|
|
299390
|
+
const rem = Array.isArray(j.procs_after) ? j.procs_after.length : 0;
|
|
299391
|
+
if (rem > 0) {
|
|
299392
|
+
renderWarning2(`Remaining matched processes: ${rem}`);
|
|
299393
|
+
} else {
|
|
299394
|
+
renderInfo2("No remaining matched processes.");
|
|
299395
|
+
}
|
|
299396
|
+
} catch {
|
|
299397
|
+
renderInfo2("Kill signal sent.");
|
|
299398
|
+
}
|
|
299381
299399
|
done();
|
|
299382
299400
|
return;
|
|
299383
299401
|
}
|
|
@@ -321792,9 +321810,24 @@ async function loadScheduled() {
|
|
|
321792
321810
|
|
|
321793
321811
|
(window as any).killScheduled = async function() {
|
|
321794
321812
|
try {
|
|
321795
|
-
await fetch('/v1/scheduled/kill', { method:'POST', headers: headers(), body: JSON.stringify({}) });
|
|
321796
|
-
|
|
321797
|
-
|
|
321813
|
+
const r = await fetch('/v1/scheduled/kill', { method:'POST', headers: headers(), body: JSON.stringify({}) });
|
|
321814
|
+
const j = await r.json();
|
|
321815
|
+
const kb = Array.isArray(j.killed) ? j.killed.length : 0;
|
|
321816
|
+
const ka = Array.isArray(j.additionally) ? j.additionally.length : 0;
|
|
321817
|
+
const before = j.gpu_before && j.gpu_before[0] ? j.gpu_before[0] : null;
|
|
321818
|
+
const after = j.gpu_after && j.gpu_after[0] ? j.gpu_after[0] : null;
|
|
321819
|
+
const rem = Array.isArray(j.procs_after) ? j.procs_after.length : 0;
|
|
321820
|
+
let msg = 'Killed ' + (kb + ka) + ' processes.';
|
|
321821
|
+
if (before && after) {
|
|
321822
|
+
msg += '
|
|
321823
|
+
GPU util: ' + before.gpu_pct + '% → ' + after.gpu_pct + '%, VRAM: ' + before.vram_used_gb + '/' + before.vram_total_gb + ' GB → ' + after.vram_used_gb + '/' + after.vram_total_gb + ' GB';
|
|
321824
|
+
}
|
|
321825
|
+
msg += rem > 0 ? ('
|
|
321826
|
+
Remaining matched processes: ' + rem) : '
|
|
321827
|
+
No remaining matched processes.';
|
|
321828
|
+
alert(msg);
|
|
321829
|
+
loadScheduled();
|
|
321830
|
+
} catch (e) { alert('Kill failed: ' + (e && e.message || String(e))); }
|
|
321798
321831
|
}
|
|
321799
321832
|
|
|
321800
321833
|
(window as any).disableAllScheduled = async function() {
|
|
@@ -327540,9 +327573,50 @@ async function handleRequest(req2, res, ollamaUrl, verbose) {
|
|
|
327540
327573
|
if (pathname === "/v1/scheduled/kill" && method === "POST") {
|
|
327541
327574
|
const body = await parseJsonBody(req2);
|
|
327542
327575
|
const pids = Array.isArray(body?.pids) ? body.pids.filter((n2) => Number.isInteger(n2)) : void 0;
|
|
327543
|
-
const pattern = typeof body?.pattern === "string" && body.pattern.trim() ? body.pattern.trim() : "(/bin/oa|open-agents-ai|nexus-daemon|OPEN-AGENTS-SCHEDULED)";
|
|
327576
|
+
const pattern = typeof body?.pattern === "string" && body.pattern.trim() ? body.pattern.trim() : "(/bin/oa|open-agents-ai|nexus-daemon|OPEN-AGENTS-SCHEDULED|ollama)";
|
|
327577
|
+
const procsBefore = listMatchingProcesses(pattern);
|
|
327578
|
+
const gpuBefore = sampleGpuUtil();
|
|
327544
327579
|
const killed = killScheduledProcesses(pids, pattern);
|
|
327545
|
-
|
|
327580
|
+
const additionally = [];
|
|
327581
|
+
try {
|
|
327582
|
+
for (const [rid, child] of Array.from(runningProcesses.entries())) {
|
|
327583
|
+
const pid = child?.pid ?? 0;
|
|
327584
|
+
if (pid > 0) {
|
|
327585
|
+
try {
|
|
327586
|
+
process.kill(pid, "SIGTERM");
|
|
327587
|
+
additionally.push({ pid, ok: true, signal: "TERM", run_id: rid });
|
|
327588
|
+
} catch {
|
|
327589
|
+
additionally.push({ pid, ok: false, signal: "TERM", run_id: rid });
|
|
327590
|
+
}
|
|
327591
|
+
try {
|
|
327592
|
+
process.kill(pid, 0);
|
|
327593
|
+
try {
|
|
327594
|
+
process.kill(pid, "SIGKILL");
|
|
327595
|
+
additionally.push({ pid, ok: true, signal: "KILL", run_id: rid });
|
|
327596
|
+
} catch {
|
|
327597
|
+
}
|
|
327598
|
+
} catch {
|
|
327599
|
+
}
|
|
327600
|
+
}
|
|
327601
|
+
runningProcesses.delete(rid);
|
|
327602
|
+
}
|
|
327603
|
+
} catch {
|
|
327604
|
+
}
|
|
327605
|
+
try {
|
|
327606
|
+
await new Promise((r2) => setTimeout(r2, 600));
|
|
327607
|
+
} catch {
|
|
327608
|
+
}
|
|
327609
|
+
const procsAfter = listMatchingProcesses(pattern);
|
|
327610
|
+
const gpuAfter = sampleGpuUtil();
|
|
327611
|
+
jsonResponse(res, 200, {
|
|
327612
|
+
killed_count: killed.length + additionally.length,
|
|
327613
|
+
killed,
|
|
327614
|
+
additionally,
|
|
327615
|
+
procs_before: procsBefore,
|
|
327616
|
+
procs_after: procsAfter,
|
|
327617
|
+
gpu_before: gpuBefore,
|
|
327618
|
+
gpu_after: gpuAfter
|
|
327619
|
+
});
|
|
327546
327620
|
return;
|
|
327547
327621
|
}
|
|
327548
327622
|
if ((pathname === "/v1/chat" || pathname === "/api/chat") && method === "POST") {
|
|
@@ -328550,6 +328624,43 @@ function killScheduledProcesses(pids, pattern) {
|
|
|
328550
328624
|
}
|
|
328551
328625
|
return killed;
|
|
328552
328626
|
}
|
|
328627
|
+
// List running processes whose command line matches `pattern` (case-insensitive).
// Returns an array of { pid, cpu, mem, cmd }, where cpu/mem are the %CPU/%MEM
// columns from `ps` (null when a field is absent/unparseable).
// Best-effort: returns [] when `ps` is unavailable or fails (e.g. non-POSIX
// platforms, or when the bundler's __require shim is not in scope).
function listMatchingProcesses(pattern) {
  const list = [];
  try {
    const { execSync: es } = __require("node:child_process");
    // NOTE(review): `pattern` can originate from the request body
    // (/v1/scheduled/kill); a hostile pattern compiled here could cause
    // catastrophic backtracking (ReDoS) — consider validating or length-limiting it.
    const re = new RegExp(pattern, "i");
    const ps = es("ps -eo pid,pcpu,pmem,command", { encoding: "utf8", stdio: "pipe" });
    for (const line of ps.split("\n")) {
      // pid is mandatory digits; pcpu/pmem are optional so a short row still parses.
      const m2 = line.trim().match(/^(\d+)\s+([0-9.]+)?\s+([0-9.]+)?\s+(.+)$/);
      if (!m2) continue; // skips the "PID %CPU %MEM COMMAND" header and blank lines
      const pid = Number.parseInt(m2[1], 10);
      const cpu = m2[2] ? Number.parseFloat(m2[2]) : null;
      const mem = m2[3] ? Number.parseFloat(m2[3]) : null;
      const cmd = m2[4] || "";
      if (!Number.isFinite(pid)) continue; // non-coercing check (was global isFinite)
      if (re.test(cmd)) list.push({ pid, cpu, mem, cmd });
    }
  } catch {
    // Deliberate best-effort: any failure (no ps, exec error) yields an empty list.
  }
  return list;
}
|
|
328647
|
+
// Snapshot NVIDIA GPU utilization by shelling out to nvidia-smi.
// Returns one entry per GPU: { gpu_pct, vram_used_gb, vram_total_gb }
// (VRAM in GB — used rounded to one decimal, total to a whole GB),
// or null when nvidia-smi is missing, times out, or its output is unusable.
function sampleGpuUtil() {
  try {
    const childProcess = __require("node:child_process");
    const output = childProcess.execSync("nvidia-smi --query-gpu=utilization.gpu,memory.used,memory.total --format=csv,noheader,nounits", { encoding: "utf8", timeout: 3e3, stdio: "pipe" });
    const samples = [];
    for (const row of output.trim().split("\n")) {
      // CSV row: "<util %>, <mem used MiB>, <mem total MiB>" (nounits).
      const fields = row.split(",").map((field) => field.trim());
      const utilization = parseInt(fields[0], 10);
      const usedGb = Math.round(parseInt(fields[1], 10) / 1024 * 10) / 10;
      const totalGb = Math.round(parseInt(fields[2], 10) / 1024);
      const rowIsValid = !isNaN(utilization) && !isNaN(usedGb) && !isNaN(totalGb);
      if (rowIsValid) {
        samples.push({ gpu_pct: utilization, vram_used_gb: usedGb, vram_total_gb: totalGb });
      }
    }
    return samples;
  } catch {
    // No GPU tooling available (or exec/timeout failure): signal "unknown" with null.
    return null;
  }
}
|
|
328553
328664
|
function startApiServer(options2 = {}) {
|
|
328554
328665
|
if (options2.quiet) setQuiet(true);
|
|
328555
328666
|
const log22 = options2.quiet ? (_msg) => {
|
package/package.json
CHANGED