@vtstech/pi-ollama-sync 1.1.1 → 1.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/ollama-sync.js +6 -6
  2. package/package.json +2 -2
package/ollama-sync.js CHANGED
@@ -9,7 +9,7 @@ import {
9
9
  getOllamaBaseUrl,
10
10
  EXTENSION_VERSION
11
11
  } from "@vtstech/pi-shared/ollama";
12
- import { section, ok, warn, info, bytesHuman, estimateVram } from "@vtstech/pi-shared/format";
12
+ import { section, ok, warn, info, bytesHuman, estimateMemory } from "@vtstech/pi-shared/format";
13
13
  var BRANDING = [
14
14
  ` \u26A1 Pi Ollama Sync v${EXTENSION_VERSION}`,
15
15
  ` Written by VTSTech`,
@@ -30,7 +30,7 @@ function getProviderConfig(existing) {
30
30
  };
31
31
  }
32
32
  function buildModelEntry(m, contextLength) {
33
- const estimatedSize = estimateVram(m.details.parameter_size, m.details.quantization_level);
33
+ const estimatedSize = estimateMemory(m.details.parameter_size, m.details.quantization_level, contextLength);
34
34
  return {
35
35
  id: m.name,
36
36
  reasoning: isReasoningModel(m.name),
@@ -109,9 +109,9 @@ function ollama_sync_temp_default(pi) {
109
109
  for (const m of newModels) {
110
110
  lines.push(ok(m.id));
111
111
  const ctxStr = m.contextLength != null ? m.contextLength.toLocaleString() : "?";
112
- const sizeStr = m.estimatedSize ? bytesHuman(m.estimatedSize) : "?";
112
+ const sizeStr = m.estimatedSize ? `GPU: ~${bytesHuman(m.estimatedSize.gpu)} \xB7 CPU: ~${bytesHuman(m.estimatedSize.cpu)}` : "?";
113
113
  lines.push(
114
- ` Params: ${m.parameterSize ?? "?"} \xB7 Quant: ${m.quantizationLevel ?? "?"} \xB7 Family: ${m.modelFamily ?? "?"} \xB7 Context: ${ctxStr} \xB7 VRAM: ~${sizeStr}`
114
+ ` Params: ${m.parameterSize ?? "?"} \xB7 Quant: ${m.quantizationLevel ?? "?"} \xB7 Family: ${m.modelFamily ?? "?"} \xB7 Context: ${ctxStr} \xB7 ${sizeStr}`
115
115
  );
116
116
  }
117
117
  if (added.length > 0 || removed.length > 0) {
@@ -187,8 +187,8 @@ function ollama_sync_temp_default(pi) {
187
187
  const modelDetails = newModels.map(
188
188
  (m) => {
189
189
  const ctxStr = m.contextLength ?? "?";
190
- const sizeStr = m.estimatedSize ? bytesHuman(m.estimatedSize) : "?";
191
- return ` \u2022 ${m.id} (${m.parameterSize}, ${m.quantizationLevel}, ctx: ${ctxStr}, ~${sizeStr})`;
190
+ const sizeStr = m.estimatedSize ? `GPU: ~${bytesHuman(m.estimatedSize.gpu)}, CPU: ~${bytesHuman(m.estimatedSize.cpu)}` : "?";
191
+ return ` \u2022 ${m.id} (${m.parameterSize}, ${m.quantizationLevel}, ctx: ${ctxStr}, ${sizeStr})`;
192
192
  }
193
193
  ).join("\n");
194
194
  return {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vtstech/pi-ollama-sync",
3
- "version": "1.1.1",
3
+ "version": "1.1.2",
4
4
  "description": "Ollama model sync extension for Pi Coding Agent",
5
5
  "main": "ollama-sync.js",
6
6
  "keywords": ["pi-extensions"],
@@ -14,7 +14,7 @@
14
14
  "url": "https://github.com/VTSTech/pi-coding-agent"
15
15
  },
16
16
  "dependencies": {
17
- "@vtstech/pi-shared": "1.1.1"
17
+ "@vtstech/pi-shared": "1.1.2"
18
18
  },
19
19
  "peerDependencies": {
20
20
  "@mariozechner/pi-coding-agent": ">=0.66"