@vtstech/pi-status 1.0.8 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +2 -2
  2. package/status.js +33 -64
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vtstech/pi-status",
3
- "version": "1.0.8",
3
+ "version": "1.1.0",
4
4
  "description": "System monitor / status bar extension for Pi Coding Agent",
5
5
  "main": "status.js",
6
6
  "keywords": ["pi-extensions"],
@@ -14,7 +14,7 @@
14
14
  "url": "https://github.com/VTSTech/pi-coding-agent"
15
15
  },
16
16
  "dependencies": {
17
- "@vtstech/pi-shared": "1.0.8"
17
+ "@vtstech/pi-shared": "1.1.0"
18
18
  },
19
19
  "peerDependencies": {
20
20
  "@mariozechner/pi-coding-agent": ">=0.66"
package/status.js CHANGED
@@ -1,11 +1,8 @@
1
1
  // .build-npm/status/status.temp.ts
2
2
  import os from "node:os";
3
- import * as fs from "node:fs";
4
- import * as path from "node:path";
5
- import { execSync } from "node:child_process";
6
- import { getOllamaBaseUrl } from "@vtstech/pi-shared/ollama";
3
+ import { execSync as gitExecSync } from "node:child_process";
4
+ import { getOllamaBaseUrl, fetchModelContextLength, readModelsJson } from "@vtstech/pi-shared/ollama";
7
5
  import { fmtBytes, fmtDur } from "@vtstech/pi-shared/format";
8
- import { readRecentAuditEntries } from "@vtstech/pi-shared/security";
9
6
  function status_temp_default(pi) {
10
7
  let lastResponseTime = null;
11
8
  let agentStartTime = null;
@@ -69,7 +66,7 @@ function status_temp_default(pi) {
69
66
  }
70
67
  function getSwap() {
71
68
  try {
72
- const out = execSync("cat /proc/meminfo", { encoding: "utf-8", timeout: 3e3 });
69
+ const out = gitExecSync("cat /proc/meminfo", { encoding: "utf-8", timeout: 3e3 });
73
70
  const swapTotal2 = Number(out.match(/SwapTotal:\s+(\d+)/)?.[1]) * 1024;
74
71
  const swapFree = Number(out.match(/SwapFree:\s+(\d+)/)?.[1]) * 1024;
75
72
  if (swapTotal2 > 0) return { used: swapTotal2 - swapFree, total: swapTotal2 };
@@ -107,26 +104,9 @@ function status_temp_default(pi) {
107
104
  nativeCtxPromise = (async () => {
108
105
  try {
109
106
  const ollamaBase = getOllamaBaseUrl();
110
- const res = await fetch(`${ollamaBase}/api/show`, {
111
- method: "POST",
112
- headers: { "Content-Type": "application/json" },
113
- body: JSON.stringify({ name: modelId }),
114
- signal: AbortSignal.timeout(5e3)
115
- });
116
- if (!res.ok) return;
117
- const data = await res.json();
118
- for (const key of Object.keys(data?.model_info ?? {})) {
119
- if (key.endsWith(".context_length")) {
120
- const val = data.model_info[key];
121
- if (typeof val === "number") {
122
- footerNativeCtx = val >= 1e3 ? `${(val / 1e3).toFixed(0)}k` : String(val);
123
- return;
124
- }
125
- }
126
- }
127
- const numCtx = data?.model_info?.["num_ctx"];
128
- if (typeof numCtx === "number") {
129
- footerNativeCtx = numCtx >= 1e3 ? `${(numCtx / 1e3).toFixed(0)}k` : String(numCtx);
107
+ const ctx = await fetchModelContextLength(ollamaBase, modelId);
108
+ if (ctx != null) {
109
+ footerNativeCtx = ctx >= 1e3 ? `${(ctx / 1e3).toFixed(0)}k` : String(ctx);
130
110
  }
131
111
  } catch {
132
112
  } finally {
@@ -136,26 +116,33 @@ function status_temp_default(pi) {
136
116
  }
137
117
  return footerNativeCtx;
138
118
  }
139
- function getOllamaLoadedModel() {
140
- const now = Date.now();
141
- if (now - ollamaLoadedLastCheck < OLLAMA_LOADED_INTERVAL) return ollamaLoadedCache;
142
- ollamaLoadedLastCheck = now;
119
+ async function fetchOllamaLoadedModel() {
143
120
  try {
144
121
  const ollamaBase = getOllamaBaseUrl();
145
- const out = execSync(`curl -s "${ollamaBase}/api/ps"`, { encoding: "utf-8", timeout: 5e3 });
146
- if (out.trim()) {
147
- const data = JSON.parse(out.trim());
148
- const models = data?.models || [];
149
- if (Array.isArray(models) && models.length > 0) {
150
- ollamaLoadedCache = models[0].name || models[0].model || "unknown";
151
- return ollamaLoadedCache;
152
- }
122
+ const res = await fetch(`${ollamaBase}/api/ps`, {
123
+ signal: AbortSignal.timeout(5e3)
124
+ });
125
+ if (!res.ok) return "";
126
+ const data = await res.json();
127
+ const models = data?.models || [];
128
+ if (Array.isArray(models) && models.length > 0) {
129
+ return models[0].name || models[0].model || "";
153
130
  }
154
131
  } catch {
155
132
  }
156
- ollamaLoadedCache = "";
157
133
  return "";
158
134
  }
135
+ function getOllamaLoadedModel() {
136
+ const now = Date.now();
137
+ if (now - ollamaLoadedLastCheck < OLLAMA_LOADED_INTERVAL) return ollamaLoadedCache;
138
+ ollamaLoadedLastCheck = now;
139
+ fetchOllamaLoadedModel().then((loaded) => {
140
+ ollamaLoadedCache = loaded;
141
+ }).catch(() => {
142
+ ollamaLoadedCache = "";
143
+ });
144
+ return ollamaLoadedCache;
145
+ }
159
146
  function extractParams(payload) {
160
147
  const params = [];
161
148
  if (payload.temperature !== void 0) params.push(`temp:${payload.temperature}`);
@@ -180,7 +167,7 @@ function status_temp_default(pi) {
180
167
  function getGitBranch() {
181
168
  if (gitBranchCache) return gitBranchCache;
182
169
  try {
183
- const branch = execSync("git rev-parse --abbrev-ref HEAD 2>/dev/null", {
170
+ const branch = gitExecSync("git rev-parse --abbrev-ref HEAD 2>/dev/null", {
184
171
  encoding: "utf-8",
185
172
  timeout: 3e3
186
173
  }).trim();
@@ -189,18 +176,8 @@ function status_temp_default(pi) {
189
176
  }
190
177
  return gitBranchCache;
191
178
  }
192
- function refreshBlockedCount() {
193
- try {
194
- const entries = readRecentAuditEntries(50);
195
- blockedCount = 0;
196
- for (const entry of entries) {
197
- if (entry.blocked === true || entry.safe === false || entry.action === "block") {
198
- blockedCount++;
199
- }
200
- }
201
- } catch {
202
- blockedCount = 0;
203
- }
179
+ function incrementBlockedCount() {
180
+ blockedCount++;
204
181
  }
205
182
  function updateMetrics() {
206
183
  cpuUsage = getCpuUsage();
@@ -216,15 +193,7 @@ function status_temp_default(pi) {
216
193
  hasSwap = false;
217
194
  }
218
195
  ollamaLoaded = getOllamaLoadedModel();
219
- let modelsJson = null;
220
- try {
221
- const raw = fs.readFileSync(
222
- path.join(os.homedir(), ".pi", "agent", "models.json"),
223
- "utf-8"
224
- );
225
- modelsJson = JSON.parse(raw);
226
- } catch {
227
- }
196
+ const modelsJson = readModelsJson();
228
197
  isLocalProvider = modelsJson ? detectLocalProvider(modelsJson) : false;
229
198
  if (currentCtx) {
230
199
  footerModel = currentCtx.model?.id || "";
@@ -241,7 +210,6 @@ function status_temp_default(pi) {
241
210
  getNativeModelCtx(modelId);
242
211
  }
243
212
  }
244
- refreshBlockedCount();
245
213
  }
246
214
  pi.on("session_start", async (_event, ctx) => {
247
215
  currentCtx = ctx;
@@ -251,7 +219,7 @@ function status_temp_default(pi) {
251
219
  ctx.ui.setFooter((tui, theme, footerData) => {
252
220
  tuiRef = tui;
253
221
  const dim = (s) => theme?.fg?.("dim", s) ?? s;
254
- const red = (s) => theme?.fg?.("red", s) ?? s;
222
+ const red = (s) => theme?.fg?.("error", s) ?? s;
255
223
  const yellow = (s) => theme?.fg?.("yellow", s) ?? s;
256
224
  const sep = dim(" \xB7 ");
257
225
  const truncateLine = (line, maxW) => {
@@ -364,6 +332,7 @@ function status_temp_default(pi) {
364
332
  const out = usage.output ?? usage.completionTokens ?? usage.completion_tokens;
365
333
  if (inp != null) lastUpstream = inp;
366
334
  if (out != null) lastDownstream = out;
335
+ if (tuiRef) tuiRef.requestRender();
367
336
  }
368
337
  pi.on("message_end", captureUsage);
369
338
  pi.on("turn_end", captureUsage);
@@ -388,7 +357,7 @@ function status_temp_default(pi) {
388
357
  if (isBlocked) {
389
358
  securityFlashTool = event.tool ?? event.name ?? "unknown";
390
359
  securityFlashUntil = Date.now() + 3e3;
391
- refreshBlockedCount();
360
+ incrementBlockedCount();
392
361
  if (tuiRef) tuiRef.requestRender();
393
362
  }
394
363
  });