@vtstech/pi-status 1.1.5 → 1.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +2 -3
  2. package/package.json +2 -2
  3. package/status.js +8 -14
package/README.md CHANGED
@@ -32,8 +32,7 @@ Slots are updated every 5 seconds (1 second for active tool timing). Render orde
32
32
 
33
33
  | Slot | Description | Condition |
34
34
  |------|-------------|-----------|
35
- | **CtxMax** | Native model context window from Ollama `/api/show` (k-notation) | Local or remote Ollama |
36
- | **RespMax** | Max response/completion tokens with k-notation (e.g., `16k`) | After first provider request |
35
+ | **CtxMax + RespMax** | Combined: native model context window + max response tokens (e.g., `CtxMax:33k RespMax:16.4k`) | Ollama or after first provider request |
37
36
  | **Resp** | Agent loop duration (e.g., `2m3s`) | After first agent cycle |
38
37
  | **CPU%** | Per-core CPU usage delta | Local Ollama only |
39
38
  | **RAM** | Used/total system memory | Local Ollama only |
@@ -42,7 +41,7 @@ Slots are updated every 5 seconds (1 second for active tool timing). Render orde
42
41
  | **SEC** | Security mode indicator (`SEC:BASIC`/`SEC:MAX`) + session-scoped blocked count + 3s flash on block event | Always shown |
43
42
  | **Active tool** | Live elapsed timer with `>` indicator | While a tool is running |
44
43
  | **Prompt** | System prompt size as `chars chr tokens tok` | After first agent start |
45
- | **Pi version** | `pi:0.66.1` (dimmed, always last) | Always shown |
44
+ | **Pi version** | `pi:0.66.1` (dim label + green value, always last) | Always shown |
46
45
 
47
46
  All slots are cleared on `session_shutdown`. Metrics that the framework already provides (model name, session tokens, context usage, thinking level) are intentionally omitted to avoid duplication.
48
47
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vtstech/pi-status",
3
- "version": "1.1.5",
3
+ "version": "1.1.7",
4
4
  "description": "System monitor / status bar extension for Pi Coding Agent",
5
5
  "main": "status.js",
6
6
  "keywords": ["pi-extensions"],
@@ -14,7 +14,7 @@
14
14
  "url": "https://github.com/VTSTech/pi-coding-agent"
15
15
  },
16
16
  "dependencies": {
17
- "@vtstech/pi-shared": "1.1.5"
17
+ "@vtstech/pi-shared": "1.1.7"
18
18
  },
19
19
  "peerDependencies": {
20
20
  "@mariozechner/pi-coding-agent": ">=0.66"
package/status.js CHANGED
@@ -141,21 +141,16 @@ function status_temp_default(pi) {
141
141
  "status-swap",
142
142
  isLocalProvider && hasSwap && swapUsed > 0 ? `${dim2("Swap")} ${green2(fmtBytes(swapUsed) + "/" + fmtBytes(swapTotal))}` : void 0
143
143
  );
144
- ctxUi.setStatus(
145
- "status-native-ctx",
146
- footerNativeCtx ? `${dim2("CtxMax:")}${green2(footerNativeCtx)}` : void 0
147
- );
144
+ const ctxParts = [];
145
+ if (footerNativeCtx) ctxParts.push(`${dim2("CtxMax:")}${green2(footerNativeCtx)}`);
148
146
  if (lastPayload) {
149
147
  const rawMax = lastPayload.max_completion_tokens ?? lastPayload.max_tokens;
150
148
  if (rawMax !== void 0) {
151
149
  const formatted = rawMax >= 1e3 ? `${(rawMax / 1e3).toFixed(rawMax % 1e3 === 0 ? 0 : 1)}k` : String(rawMax);
152
- ctxUi.setStatus("status-resp-max", `${dim2("RespMax:")}${green2(formatted)}`);
153
- } else {
154
- ctxUi.setStatus("status-resp-max", void 0);
150
+ ctxParts.push(`${dim2("RespMax:")}${green2(formatted)}`);
155
151
  }
156
- } else {
157
- ctxUi.setStatus("status-resp-max", void 0);
158
152
  }
153
+ ctxUi.setStatus("status-ctx", ctxParts.length > 0 ? ctxParts.join(" ") : void 0);
159
154
  ctxUi.setStatus(
160
155
  "status-resp",
161
156
  lastResponseTime !== null ? `${dim2("Resp")} ${green2(fmtDur(lastResponseTime))}` : void 0
@@ -181,9 +176,9 @@ function status_temp_default(pi) {
181
176
  } else {
182
177
  ctxUi.setStatus("status-tool", void 0);
183
178
  }
184
- ctxUi.setStatus("system-prompt", cachedPromptText ?? void 0);
179
+ ctxUi.setStatus("status-prompt", cachedPromptText ?? dim2("Prompt: \u2026"));
185
180
  if (versionsText) {
186
- ctxUi.setStatus("status-versions", dim2(versionsText));
181
+ ctxUi.setStatus("status-versions", `${dim2("pi:")}${green2(versionsText.replace(/^pi:/, ""))}`);
187
182
  }
188
183
  }
189
184
  function updateMetrics() {
@@ -240,11 +235,10 @@ function status_temp_default(pi) {
240
235
  ui.setStatus("status-cpu", void 0);
241
236
  ui.setStatus("status-ram", void 0);
242
237
  ui.setStatus("status-swap", void 0);
243
- ui.setStatus("status-native-ctx", void 0);
238
+ ui.setStatus("status-ctx", void 0);
244
239
  ui.setStatus("status-resp", void 0);
245
- ui.setStatus("status-resp-max", void 0);
246
240
  ui.setStatus("status-params", void 0);
247
- ui.setStatus("system-prompt", void 0);
241
+ ui.setStatus("status-prompt", void 0);
248
242
  ui.setStatus("status-sec", void 0);
249
243
  ui.setStatus("status-tool", void 0);
250
244
  ui.setStatus("status-versions", void 0);