picomon 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
picomon-0.1.1/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Omar Kamali
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
picomon-0.1.1/PKG-INFO ADDED
@@ -0,0 +1,120 @@
1
+ Metadata-Version: 2.4
2
+ Name: picomon
3
+ Version: 0.1.1
4
+ Summary: Minimal curses dashboard for monitoring AMD GPUs via amd-smi
5
+ Author-email: Omar Kamali <picomon@omarkama.li>
6
+ License: MIT License
7
+
8
+ Copyright (c) 2025 Omar Kamali
9
+
10
+ Permission is hereby granted, free of charge, to any person obtaining a copy
11
+ of this software and associated documentation files (the "Software"), to deal
12
+ in the Software without restriction, including without limitation the rights
13
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14
+ copies of the Software, and to permit persons to whom the Software is
15
+ furnished to do so, subject to the following conditions:
16
+
17
+ The above copyright notice and this permission notice shall be included in all
18
+ copies or substantial portions of the Software.
19
+
20
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26
+ SOFTWARE.
27
+
28
+ Project-URL: Homepage, https://github.com/omarkamali/picomon
29
+ Project-URL: Issues, https://github.com/omarkamali/picomon/issues
30
+ Project-URL: Repository, https://github.com/omarkamali/picomon
31
+ Keywords: amd,gpu,monitor,cli,curses
32
+ Classifier: Development Status :: 4 - Beta
33
+ Classifier: Environment :: Console
34
+ Classifier: Intended Audience :: System Administrators
35
+ Classifier: Intended Audience :: Developers
36
+ Classifier: License :: OSI Approved :: MIT License
37
+ Classifier: Operating System :: POSIX
38
+ Classifier: Programming Language :: Python
39
+ Classifier: Programming Language :: Python :: 3
40
+ Classifier: Programming Language :: Python :: 3.9
41
+ Classifier: Programming Language :: Python :: 3.10
42
+ Classifier: Programming Language :: Python :: 3.11
43
+ Classifier: Programming Language :: Python :: 3.12
44
+ Classifier: Topic :: System :: Monitoring
45
+ Requires-Python: >=3.9
46
+ Description-Content-Type: text/markdown
47
+ License-File: LICENSE
48
+ Provides-Extra: dev
49
+ Requires-Dist: pytest>=7; extra == "dev"
50
+ Dynamic: license-file
51
+
52
+ # picomon
53
+
54
+ picomon is a tiny terminal dashboard for monitoring AMD GPUs via `amd-smi`. It polls basic metrics (gfx activity, memory usage, and power) and renders them as sparklines inside a curses UI so you can keep an eye on accelerators without launching a full GUI stack.
55
+
56
+ Homepage: <https://omarkamali.github.io/picomon/>
57
+
58
+ ## Why?
59
+
60
+ I like nvtop but the asserts kept crashing it on some AMD devices. picomon is a lightweight alternative that just polls metrics and renders them as sparklines, trading off ironclad accuracy checks for more reliability.
61
+
62
+ It hasn't been tested on all AMD GPUs. If it fails to run on your GPU, please open a new issue using [this template](https://github.com/omarkamali/picomon/issues/new?assignees=&labels=bug&template=bug_report.md&title=%5BBug%5D).
63
+
64
+ ## Requirements
65
+
66
+ - Python 3.9 or newer
67
+ - The `amd-smi` CLI available on your PATH (if your GPU is properly setup then you already have this)
68
+ - An ANSI-compatible terminal for the curses UI
69
+
70
+ ## Installation
71
+
72
+ ```shell
73
+ pip install picomon
74
+ ```
75
+
76
+ ## Usage
77
+
78
+ After installation, run the CLI:
79
+
80
+ ```
81
+ $ picomon
82
+
83
+ ┌──────────────────────────────────────────┐ ┌──────────────────────────────────────────┐
84
+ │ GPU 0 GFX 42% UMC 21% │ │ GPU 1 GFX 78% UMC 66% │
85
+ │ PWR 135/250W (54%) VRAM 10.0/16.0GB 62% │ │ PWR 210/250W (84%) VRAM 14.5/16.0GB 90% │
86
+ │ │ │ │
87
+ │ GFX ▁▂▂▃▄▄▅▆▆▇█▇▆▅▄▃▂▁ │ │ GFX ▂▃▄▅▆▇██▇▆▅▄▂▂▃▅▆ │
88
+ │ PWR ▁▁▂▂▃▄▄▅▆▇██▇▆▅▄▂▁ │ │ PWR ▂▂▃▄▅▆▇██▇▆▅▄▃▂▂▃ │
89
+ │ VRM ▁▁▂▂▃▄▄▅▆▇███▇▆▅▄▂ │ │ VRM ▂▃▄▅▆▆▇███▇▆▅▄▃▂▂▃ │
90
+ └──────────────────────────────────────────┘ └──────────────────────────────────────────┘
91
+ ```
92
+
93
+ Key bindings:
94
+ - `q` to quit
95
+
96
+ Common flags:
97
+ - `--update-interval` (seconds between refreshes, default 3)
98
+ - `--history-minutes` (rolling window to retain, default 30)
99
+ - `--static-timeout` / `--metric-timeout` (seconds to wait for `amd-smi` responses)
100
+
101
+ These correspond to the `PicomonConfig` dataclass, so you can also import and reuse
102
+ picomon as a library:
103
+
104
+ ```python
105
+ from picomon import PicomonConfig, run_monitor
106
+
107
+ config = PicomonConfig(update_interval=1.5)
108
+ run_monitor(["--update-interval", str(config.update_interval)])
109
+ ```
110
+
111
+ ## Development
112
+
113
+ - Run tests with `pytest` (see the CI workflow for reference)
114
+ - Use `scripts/publish.py` to cut a GitHub release once tags are in place
115
+
116
+ ## License
117
+
118
+ MIT © [Omar Kamali](https://omarkamali.com)
119
+
120
+ Source: <https://github.com/omarkamali/picomon>
@@ -0,0 +1,69 @@
1
+ # picomon
2
+
3
+ picomon is a tiny terminal dashboard for monitoring AMD GPUs via `amd-smi`. It polls basic metrics (gfx activity, memory usage, and power) and renders them as sparklines inside a curses UI so you can keep an eye on accelerators without launching a full GUI stack.
4
+
5
+ Homepage: <https://omarkamali.github.io/picomon/>
6
+
7
+ ## Why?
8
+
9
+ I like nvtop but the asserts kept crashing it on some AMD devices. picomon is a lightweight alternative that just polls metrics and renders them as sparklines, trading off ironclad accuracy checks for more reliability.
10
+
11
+ It hasn't been tested on all AMD GPUs. If it fails to run on your GPU, please open a new issue using [this template](https://github.com/omarkamali/picomon/issues/new?assignees=&labels=bug&template=bug_report.md&title=%5BBug%5D).
12
+
13
+ ## Requirements
14
+
15
+ - Python 3.9 or newer
16
+ - The `amd-smi` CLI available on your PATH (if your GPU is properly setup then you already have this)
17
+ - An ANSI-compatible terminal for the curses UI
18
+
19
+ ## Installation
20
+
21
+ ```shell
22
+ pip install picomon
23
+ ```
24
+
25
+ ## Usage
26
+
27
+ After installation, run the CLI:
28
+
29
+ ```
30
+ $ picomon
31
+
32
+ ┌──────────────────────────────────────────┐ ┌──────────────────────────────────────────┐
33
+ │ GPU 0 GFX 42% UMC 21% │ │ GPU 1 GFX 78% UMC 66% │
34
+ │ PWR 135/250W (54%) VRAM 10.0/16.0GB 62% │ │ PWR 210/250W (84%) VRAM 14.5/16.0GB 90% │
35
+ │ │ │ │
36
+ │ GFX ▁▂▂▃▄▄▅▆▆▇█▇▆▅▄▃▂▁ │ │ GFX ▂▃▄▅▆▇██▇▆▅▄▂▂▃▅▆ │
37
+ │ PWR ▁▁▂▂▃▄▄▅▆▇██▇▆▅▄▂▁ │ │ PWR ▂▂▃▄▅▆▇██▇▆▅▄▃▂▂▃ │
38
+ │ VRM ▁▁▂▂▃▄▄▅▆▇███▇▆▅▄▂ │ │ VRM ▂▃▄▅▆▆▇███▇▆▅▄▃▂▂▃ │
39
+ └──────────────────────────────────────────┘ └──────────────────────────────────────────┘
40
+ ```
41
+
42
+ Key bindings:
43
+ - `q` to quit
44
+
45
+ Common flags:
46
+ - `--update-interval` (seconds between refreshes, default 3)
47
+ - `--history-minutes` (rolling window to retain, default 30)
48
+ - `--static-timeout` / `--metric-timeout` (seconds to wait for `amd-smi` responses)
49
+
50
+ These correspond to the `PicomonConfig` dataclass, so you can also import and reuse
51
+ picomon as a library:
52
+
53
+ ```python
54
+ from picomon import PicomonConfig, run_monitor
55
+
56
+ config = PicomonConfig(update_interval=1.5)
57
+ run_monitor(["--update-interval", str(config.update_interval)])
58
+ ```
59
+
60
+ ## Development
61
+
62
+ - Run tests with `pytest` (see the CI workflow for reference)
63
+ - Use `scripts/publish.py` to cut a GitHub release once tags are in place
64
+
65
+ ## License
66
+
67
+ MIT © [Omar Kamali](https://omarkamali.com)
68
+
69
+ Source: <https://github.com/omarkamali/picomon>
@@ -0,0 +1,47 @@
1
+ [build-system]
2
+ requires = ["setuptools>=65", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "picomon"
7
+ version = "0.1.1"
8
+ description = "Minimal curses dashboard for monitoring AMD GPUs via amd-smi"
9
+ readme = "README.md"
10
+ license = { file = "LICENSE" }
11
+ authors = [
12
+ { name = "Omar Kamali", email = "picomon@omarkama.li" }
13
+ ]
14
+ requires-python = ">=3.9"
15
+ keywords = ["amd", "gpu", "monitor", "cli", "curses"]
16
+ classifiers = [
17
+ "Development Status :: 4 - Beta",
18
+ "Environment :: Console",
19
+ "Intended Audience :: System Administrators",
20
+ "Intended Audience :: Developers",
21
+ "License :: OSI Approved :: MIT License",
22
+ "Operating System :: POSIX",
23
+ "Programming Language :: Python",
24
+ "Programming Language :: Python :: 3",
25
+ "Programming Language :: Python :: 3.9",
26
+ "Programming Language :: Python :: 3.10",
27
+ "Programming Language :: Python :: 3.11",
28
+ "Programming Language :: Python :: 3.12",
29
+ "Topic :: System :: Monitoring",
30
+ ]
31
+ dependencies = []
32
+
33
+ [project.urls]
34
+ Homepage = "https://github.com/omarkamali/picomon"
35
+ Issues = "https://github.com/omarkamali/picomon/issues"
36
+ Repository = "https://github.com/omarkamali/picomon"
37
+
38
+ [project.scripts]
39
+ picomon = "picomon.monitor:run"
40
+
41
+ [project.optional-dependencies]
42
+ dev = [
43
+ "pytest>=7",
44
+ ]
45
+
46
+ [tool.setuptools.packages.find]
47
+ where = ["src"]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,19 @@
1
"""Public package surface for picomon."""

from __future__ import annotations

try:
    from importlib import metadata as _metadata
except ImportError:  # pragma: no cover
    _metadata = None  # type: ignore[assignment]

from .config import PicomonConfig
from .monitor import run as run_monitor

__all__ = ["PicomonConfig", "run_monitor", "__version__"]


def _resolve_version() -> str:
    # Best-effort lookup of the installed distribution's version; editable
    # or source-tree imports without metadata fall back to a sentinel.
    if _metadata is None:  # pragma: no cover
        return "0.0.0"
    try:  # pragma: no cover - best effort in editable installs
        return _metadata.version("picomon")
    except _metadata.PackageNotFoundError:  # type: ignore[attr-defined]
        return "0.0.0"


__version__ = _resolve_version()
@@ -0,0 +1,29 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+
5
+
6
@dataclass
class PicomonConfig:
    """Runtime configuration for picomon.

    Attributes:
        update_interval: Seconds between metric refreshes.
        history_minutes: Minutes of rolling history to retain.
        static_timeout: Seconds to wait for static amd-smi queries.
        metric_timeout: Seconds to wait for metric amd-smi queries.
    """

    update_interval: float = 3.0
    history_minutes: int = 30
    static_timeout: float = 10.0
    metric_timeout: float = 5.0

    def __post_init__(self) -> None:
        # Every knob must be strictly positive; fail at construction time so
        # a misconfiguration surfaces immediately rather than mid-run.
        for name in (
            "update_interval",
            "history_minutes",
            "static_timeout",
            "metric_timeout",
        ):
            if getattr(self, name) <= 0:
                raise ValueError(f"{name} must be greater than zero")

    @property
    def max_points(self) -> int:
        """Maximum number of samples to retain per metric."""

        # One sample per refresh over the whole history window, never < 1.
        return max(1, int(self.history_minutes * 60 / self.update_interval))
@@ -0,0 +1,61 @@
1
+ from __future__ import annotations
2
+
3
+ from collections import deque
4
+ from datetime import datetime
5
+ from typing import Deque
6
+
7
+ __all__ = ["GPUHistory", "parse_value_unit"]
8
+
9
+
10
def parse_value_unit(value) -> float:
    """Parse amd-smi outputs like {'value': 42, 'unit': '%'} into floats."""

    # Typical amd-smi JSON shape: {"value": n, "unit": "..."}.
    if isinstance(value, dict) and "value" in value:
        try:
            return float(value["value"])
        except (TypeError, ValueError):
            return 0.0
    # Bare numbers pass straight through.
    if isinstance(value, (int, float)):
        return float(value)
    # Otherwise stringify and strip common unit suffixes ("%", "W", "MB",
    # "GB/s", ...); anything unparseable collapses to 0.0.
    try:
        return float(str(value).strip().rstrip("%WMGBs/"))
    except (TypeError, ValueError):
        return 0.0
25
+
26
+
27
class GPUHistory:
    """Static info + rolling metric history for one GPU."""

    def __init__(self, max_points: int):
        # Static facts (seeded from `amd-smi static`); 0.0 means unknown.
        self.vram_total_mb: float = 0.0
        self.power_limit_w: float = 0.0

        # Parallel ring buffers — entry i of every deque belongs to the same
        # sample, so they are always appended and popped together.
        self.timestamps: Deque[datetime] = deque(maxlen=max_points)
        self.gfx: Deque[float] = deque(maxlen=max_points)
        self.umc: Deque[float] = deque(maxlen=max_points)
        self.power_w: Deque[float] = deque(maxlen=max_points)
        self.vram_used_mb: Deque[float] = deque(maxlen=max_points)

    def add_sample(
        self,
        ts: datetime,
        gfx: float,
        umc: float,
        power_w: float,
        vram_used_mb: float,
    ) -> None:
        """Append one aligned sample across every metric buffer."""
        self.timestamps.append(ts)
        self.gfx.append(gfx)
        self.umc.append(umc)
        self.power_w.append(power_w)
        self.vram_used_mb.append(vram_used_mb)

    def prune_before(self, cutoff: datetime) -> None:
        """Drop samples strictly older than *cutoff* from the left end."""
        metric_buffers = (self.gfx, self.umc, self.power_w, self.vram_used_mb)
        while self.timestamps and self.timestamps[0] < cutoff:
            self.timestamps.popleft()
            for buf in metric_buffers:
                buf.popleft()
61
+
@@ -0,0 +1,109 @@
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import logging
5
+ from typing import Sequence
6
+
7
+ import curses
8
+
9
+ from .config import PicomonConfig
10
+ from .smi import load_static_info, update_dynamic_info
11
+ from .ui import render_loop
12
+
13
+ __all__ = ["build_parser", "run"]
14
+
15
# Shared default configuration instance; the argparse defaults below read
# their values from it so CLI help and library defaults cannot drift apart.
DEFAULT_CONFIG = PicomonConfig()
16
+
17
# CLI --log-level choices mapped to their stdlib logging constants,
# listed from most to least severe.
_LOG_LEVELS = {
    name: getattr(logging, name)
    for name in ("CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG")
}
24
+
25
+
26
+ def build_parser() -> argparse.ArgumentParser:
27
+ parser = argparse.ArgumentParser(
28
+ prog="picomon",
29
+ description="Minimal AMD GPU dashboard with curses UI",
30
+ )
31
+ parser.add_argument(
32
+ "--update-interval",
33
+ type=float,
34
+ default=DEFAULT_CONFIG.update_interval,
35
+ help="Refresh interval in seconds (default: %(default)s)",
36
+ )
37
+ parser.add_argument(
38
+ "--history-minutes",
39
+ type=int,
40
+ default=DEFAULT_CONFIG.history_minutes,
41
+ help="How many minutes of history to retain (default: %(default)s)",
42
+ )
43
+ parser.add_argument(
44
+ "--static-timeout",
45
+ type=float,
46
+ default=DEFAULT_CONFIG.static_timeout,
47
+ help="Timeout (seconds) when collecting static metadata (default: %(default)s)",
48
+ )
49
+ parser.add_argument(
50
+ "--metric-timeout",
51
+ type=float,
52
+ default=DEFAULT_CONFIG.metric_timeout,
53
+ help="Timeout (seconds) when polling metrics (default: %(default)s)",
54
+ )
55
+ parser.add_argument(
56
+ "--log-level",
57
+ default="INFO",
58
+ choices=sorted(_LOG_LEVELS),
59
+ help="Verbosity for logging diagnostics (default: %(default)s)",
60
+ )
61
+ return parser
62
+
63
+
64
def _configure_logging(level: str) -> None:
    """Initialise root logging at *level*; unknown names fall back to INFO."""
    resolved = _LOG_LEVELS.get(level.upper(), logging.INFO)
    logging.basicConfig(level=resolved)
66
+
67
+
68
def _config_from_namespace(ns: argparse.Namespace) -> PicomonConfig:
    """Translate parsed CLI flags into a validated ``PicomonConfig``.

    Raises ``ValueError`` (from ``PicomonConfig.__post_init__``) when any
    flag value is non-positive.
    """
    kwargs = {
        "update_interval": ns.update_interval,
        "history_minutes": ns.history_minutes,
        "static_timeout": ns.static_timeout,
        "metric_timeout": ns.metric_timeout,
    }
    return PicomonConfig(**kwargs)
75
+
76
+
77
def run(argv: Sequence[str] | None = None) -> int:
    """CLI entry point.

    Parses *argv* (``sys.argv`` when ``None``), collects static GPU info via
    amd-smi, then drives the curses render loop until the user quits.

    Returns a process exit code: 0 on normal quit or Ctrl-C, 1 when no GPUs
    are detected or curses fails, 2 for an invalid configuration.
    """

    args = build_parser().parse_args(None if argv is None else list(argv))
    _configure_logging(args.log_level)
    logger = logging.getLogger("picomon")

    try:
        config = _config_from_namespace(args)
    except ValueError as exc:
        logger.error("Invalid configuration: %s", exc)
        return 2

    gpus = load_static_info(config)
    if not gpus:
        logger.error(
            "No GPUs detected via amd-smi static metrics. Check permissions/install."
        )
        return 1

    # Take one sample up front so the first rendered frame is not empty.
    update_dynamic_info(config, gpus)

    def tick() -> None:
        update_dynamic_info(config, gpus)

    try:
        curses.wrapper(render_loop, config, gpus, tick)
    except KeyboardInterrupt:  # pragma: no cover - user initiated
        return 0
    except curses.error as exc:
        logger.error("Curses rendering failed: %s", exc)
        return 1

    return 0
@@ -0,0 +1,147 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import logging
5
+ import subprocess
6
+ from datetime import datetime, timedelta
7
+ from typing import Dict, Protocol, Sequence
8
+
9
+ from .config import PicomonConfig
10
+ from .history import GPUHistory, parse_value_unit
11
+
12
+ __all__ = ["load_static_info", "update_dynamic_info"]
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
class CommandRunner(Protocol):
    """Structural type for callables that run a command and return stdout.

    Matches both the real subprocess-backed runner and test fakes.
    """

    def __call__(self, args: Sequence[str], *, timeout: float) -> str: ...
19
+
20
+
21
def _default_runner(args: Sequence[str], *, timeout: float) -> str:
    """Run *args* and return its captured stdout as text.

    stderr is discarded; a non-zero exit raises ``CalledProcessError`` and an
    expired *timeout* raises ``TimeoutExpired`` — the same contract as
    ``subprocess.check_output``.
    """

    completed = subprocess.run(  # noqa: S603 - fixed amd-smi command line
        list(args),
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
        text=True,
        timeout=timeout,
        check=True,
    )
    return completed.stdout
28
+
29
+
30
def _run_json(
    args: Sequence[str], *, timeout: float, runner: CommandRunner
) -> dict | None:
    """Execute *args* via *runner* and decode its stdout as JSON.

    Any failure — command error, timeout, or malformed JSON — is logged at
    debug level and reported as ``None`` so callers can degrade gracefully.
    """

    try:
        raw = runner(args, timeout=timeout)
    except Exception as exc:  # pragma: no cover - just log
        logger.debug("Failed to run %s: %s", " ".join(args), exc)
        return None

    try:
        return json.loads(raw)
    except json.JSONDecodeError as exc:  # pragma: no cover - unexpected
        logger.debug("Failed to parse amd-smi json: %s", exc)
        return None
44
+
45
+
46
def load_static_info(
    config: PicomonConfig, *, runner: CommandRunner | None = None
) -> Dict[int, GPUHistory]:
    """Return GPU histories seeded with static data from amd-smi."""

    active_runner = runner or _default_runner
    payload = _run_json(
        ["amd-smi", "static", "--vram", "--limit", "--json"],
        timeout=config.static_timeout,
        runner=active_runner,
    )
    if not payload:
        return {}

    histories: Dict[int, GPUHistory] = {}
    for entry in payload.get("gpu_data", []):
        raw_id = entry.get("gpu")
        if raw_id is None:
            continue
        try:
            index = int(raw_id)
        except (TypeError, ValueError):
            # Skip entries whose GPU id is not an integer.
            continue

        hist = GPUHistory(config.max_points)

        # Total VRAM (MB), when the "vram" block is present.
        size = (entry.get("vram", {}) or {}).get("size")
        if size is not None:
            hist.vram_total_mb = parse_value_unit(size)

        # Board power limit (W); the field name varies across amd-smi versions.
        limits = entry.get("limit", {}) or {}
        power_cap = limits.get("socket_power") or limits.get("max_power")
        if power_cap is not None:
            hist.power_limit_w = parse_value_unit(power_cap)

        histories[index] = hist

    return histories
85
+
86
+
87
def update_dynamic_info(
    config: PicomonConfig,
    gpus: Dict[int, GPUHistory],
    *,
    runner: CommandRunner | None = None,
    timestamp_provider=datetime.now,
) -> None:
    """Add a fresh sample for each GPU in-place."""

    payload = _run_json(
        [
            "amd-smi",
            "metric",
            "--usage",
            "--power",
            "--mem-usage",
            "--json",
        ],
        timeout=config.metric_timeout,
        runner=runner or _default_runner,
    )
    if not payload:
        # amd-smi failed this round; keep the existing history untouched.
        return

    now = timestamp_provider()
    for entry in payload.get("gpu_data", []):
        raw_id = entry.get("gpu")
        if raw_id is None:
            continue
        try:
            index = int(raw_id)
        except (TypeError, ValueError):
            continue

        # Lazily create a history for GPUs that were not seen at startup.
        hist = gpus.get(index)
        if hist is None:
            hist = gpus[index] = GPUHistory(config.max_points)

        # Activity percentages clamped to [0, 100] to guard against odd readings.
        usage = entry.get("usage", {}) or {}
        gfx = min(100.0, max(0.0, parse_value_unit(usage.get("gfx_activity", 0))))
        umc = min(100.0, max(0.0, parse_value_unit(usage.get("umc_activity", 0))))

        # Field casing differs between amd-smi releases.
        power_block = entry.get("power", {}) or {}
        socket_pwr = power_block.get("socket_power") or power_block.get("SOCKET_POWER")
        power_w = 0.0 if socket_pwr is None else parse_value_unit(socket_pwr)

        mem_usage = entry.get("mem_usage", {}) or {}
        used = mem_usage.get("used_visible_vram") or mem_usage.get("used_vram")
        total = mem_usage.get("total_visible_vram") or mem_usage.get("total_vram")
        vram_used_mb = 0.0 if used is None else parse_value_unit(used)
        # Refresh the static total opportunistically when metrics report it.
        if total is not None:
            total_mb = parse_value_unit(total)
            if total_mb > 0:
                hist.vram_total_mb = total_mb

        hist.add_sample(now, gfx, umc, power_w, vram_used_mb)

    # Trim everything older than the configured window, including GPUs that
    # did not report this round.
    cutoff = now - timedelta(minutes=config.history_minutes)
    for hist in gpus.values():
        hist.prune_before(cutoff)
@@ -0,0 +1,145 @@
1
+ from __future__ import annotations
2
+
3
+ import curses
4
+ import time
5
+ from typing import Callable, Dict
6
+
7
+ from .config import PicomonConfig
8
+ from .history import GPUHistory
9
+
10
+ __all__ = ["sparkline", "draw_gpu_box", "render_loop"]
11
+
12
# Eight block characters, shortest to tallest; each bar is one eighth of
# the (half-row) value range.
SPARK_BARS = "▁▂▃▄▅▆▇█"


def sparkline(values, width: int, vmax: float | None) -> tuple[str, str]:
    """Return a two-row sparkline string for the provided samples."""

    if width <= 0:
        return "", ""
    samples = list(values)
    count = len(samples)
    if count == 0:
        blank = " " * width
        return blank, blank

    # Auto-scale when no (positive) maximum was supplied.
    if vmax is None or vmax <= 0:
        peak = max(samples)
        vmax = peak if peak > 0 else 1.0

    levels = len(SPARK_BARS)
    top_chars: list[str] = []
    bot_chars: list[str] = []

    for col in range(width):
        # Resample: map each output column back onto a source sample index.
        if width == 1:
            sample_idx = count - 1
        else:
            sample_idx = int(col * (count - 1) / (width - 1))
        frac = max(0.0, min(1.0, samples[sample_idx] / vmax))

        # The chart is two rows tall: the bottom row covers [0, 0.5] of the
        # range; the top row covers (0.5, 1.0] above a full bottom bar.
        if frac <= 0.5:
            bot = SPARK_BARS[int(round(frac * 2.0 * (levels - 1)))]
            top = " "
        else:
            bot = SPARK_BARS[-1]
            top = SPARK_BARS[int(round((frac - 0.5) * 2.0 * (levels - 1)))]

        top_chars.append(top)
        bot_chars.append(bot)

    return "".join(top_chars), "".join(bot_chars)
55
+
56
+
57
def draw_gpu_box(win, gpu_id: int, hist: GPUHistory) -> None:
    """Render one GPU's header line and GFX/PWR/VRM sparklines into *win*."""

    win.box()
    _, box_w = win.getmaxyx()  # height is implied by the fixed row layout
    inner_w = box_w - 2
    spark_w = max(16, inner_w - 6)

    # Latest readings; everything is zero until the first sample arrives.
    if hist.timestamps:
        gfx_now, umc_now = hist.gfx[-1], hist.umc[-1]
        pwr_now, vram_now = hist.power_w[-1], hist.vram_used_mb[-1]
    else:
        gfx_now = umc_now = pwr_now = vram_now = 0.0

    # Fall back to 1.0 so the percentage math below never divides by zero.
    pwr_lim = hist.power_limit_w if hist.power_limit_w > 0 else 1.0
    vram_tot = hist.vram_total_mb if hist.vram_total_mb > 0 else 1.0

    pwr_pct = 100.0 * pwr_now / pwr_lim
    vram_pct = 100.0 * vram_now / vram_tot

    header = (
        f"GPU {gpu_id} "
        f"GFX {gfx_now:3.0f}% "
        f"UMC {umc_now:3.0f}% "
        f"PWR {pwr_now:4.0f}/{pwr_lim:4.0f}W ({pwr_pct:3.0f}%) "
        f"VRAM {vram_now/1024:4.1f}/{vram_tot/1024:4.1f}GB ({vram_pct:3.0f}%)"
    )
    win.addnstr(1, 1, header, inner_w)

    # Each metric occupies two rows: label + top half, then bottom half.
    for row, label, series, scale in (
        (2, "GFX ", hist.gfx, 100.0),
        (4, "PWR ", hist.power_w, hist.power_limit_w or None),
        (6, "VRM ", hist.vram_used_mb, hist.vram_total_mb or None),
    ):
        top, bot = sparkline(series, spark_w, scale)
        win.addnstr(row, 1, label, inner_w)
        win.addnstr(row, 5, top, spark_w)
        win.addnstr(row + 1, 5, bot, spark_w)
100
+
101
+
102
def render_loop(
    stdscr,
    config: PicomonConfig,
    gpus: Dict[int, GPUHistory],
    tick: Callable[[], None],
) -> None:
    """Main curses loop: poll metrics via *tick*, redraw, and watch for 'q'."""

    curses.curs_set(0)
    stdscr.nodelay(True)  # getch() must not block the refresh cadence

    while True:
        tick()

        stdscr.erase()
        max_y, max_x = stdscr.getmaxyx()

        title = f"picomon (last {config.history_minutes} min) - q to quit"
        stdscr.addnstr(0, 0, title, max_x - 1)
        stdscr.hline(1, 0, curses.ACS_HLINE, max_x - 1)

        # Fixed two-column layout: the first four GPUs fill the left column,
        # the rest go right; boxes that would overflow the screen are skipped.
        box_h = 9
        col_w = (max_x - 3) // 2
        col_x = (1, 1 + col_w + 1)

        for idx, gpu_id in enumerate(sorted(gpus)):
            column = 0 if idx < 4 else 1
            row_in_col = idx - 4 * column
            top = 2 + row_in_col * box_h
            if top + box_h >= max_y:
                continue

            win = stdscr.derwin(box_h, col_w, top, col_x[column])
            draw_gpu_box(win, gpu_id, gpus[gpu_id])

        stdscr.refresh()

        # Sleep in short slices so a 'q' keypress quits promptly.
        deadline = time.time() + config.update_interval
        while time.time() < deadline:
            if stdscr.getch() in (ord("q"), ord("Q")):
                return
            time.sleep(0.1)
@@ -0,0 +1,120 @@
1
+ Metadata-Version: 2.4
2
+ Name: picomon
3
+ Version: 0.1.1
4
+ Summary: Minimal curses dashboard for monitoring AMD GPUs via amd-smi
5
+ Author-email: Omar Kamali <picomon@omarkama.li>
6
+ License: MIT License
7
+
8
+ Copyright (c) 2025 Omar Kamali
9
+
10
+ Permission is hereby granted, free of charge, to any person obtaining a copy
11
+ of this software and associated documentation files (the "Software"), to deal
12
+ in the Software without restriction, including without limitation the rights
13
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14
+ copies of the Software, and to permit persons to whom the Software is
15
+ furnished to do so, subject to the following conditions:
16
+
17
+ The above copyright notice and this permission notice shall be included in all
18
+ copies or substantial portions of the Software.
19
+
20
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26
+ SOFTWARE.
27
+
28
+ Project-URL: Homepage, https://github.com/omarkamali/picomon
29
+ Project-URL: Issues, https://github.com/omarkamali/picomon/issues
30
+ Project-URL: Repository, https://github.com/omarkamali/picomon
31
+ Keywords: amd,gpu,monitor,cli,curses
32
+ Classifier: Development Status :: 4 - Beta
33
+ Classifier: Environment :: Console
34
+ Classifier: Intended Audience :: System Administrators
35
+ Classifier: Intended Audience :: Developers
36
+ Classifier: License :: OSI Approved :: MIT License
37
+ Classifier: Operating System :: POSIX
38
+ Classifier: Programming Language :: Python
39
+ Classifier: Programming Language :: Python :: 3
40
+ Classifier: Programming Language :: Python :: 3.9
41
+ Classifier: Programming Language :: Python :: 3.10
42
+ Classifier: Programming Language :: Python :: 3.11
43
+ Classifier: Programming Language :: Python :: 3.12
44
+ Classifier: Topic :: System :: Monitoring
45
+ Requires-Python: >=3.9
46
+ Description-Content-Type: text/markdown
47
+ License-File: LICENSE
48
+ Provides-Extra: dev
49
+ Requires-Dist: pytest>=7; extra == "dev"
50
+ Dynamic: license-file
51
+
52
+ # picomon
53
+
54
+ picomon is a tiny terminal dashboard for monitoring AMD GPUs via `amd-smi`. It polls basic metrics (gfx activity, memory usage, and power) and renders them as sparklines inside a curses UI so you can keep an eye on accelerators without launching a full GUI stack.
55
+
56
+ Homepage: <https://omarkamali.github.io/picomon/>
57
+
58
+ ## Why?
59
+
60
+ I like nvtop but the asserts kept crashing it on some AMD devices. picomon is a lightweight alternative that just polls metrics and renders them as sparklines, trading off ironclad accuracy checks for more reliability.
61
+
62
+ It hasn't been tested on all AMD GPUs. If it fails to run on your GPU, please open a new issue using [this template](https://github.com/omarkamali/picomon/issues/new?assignees=&labels=bug&template=bug_report.md&title=%5BBug%5D).
63
+
64
+ ## Requirements
65
+
66
+ - Python 3.9 or newer
67
+ - The `amd-smi` CLI available on your PATH (if your GPU is properly set up then you already have this)
68
+ - An ANSI-compatible terminal for the curses UI
69
+
70
+ ## Installation
71
+
72
+ ```shell
73
+ pip install picomon
74
+ ```
75
+
76
+ ## Usage
77
+
78
+ After installation, run the CLI:
79
+
80
+ ```
81
+ $ picomon
82
+
83
+ ┌──────────────────────────────────────────┐ ┌──────────────────────────────────────────┐
84
+ │ GPU 0 GFX 42% UMC 21% │ │ GPU 1 GFX 78% UMC 66% │
85
+ │ PWR 135/250W (54%) VRAM 10.0/16.0GB 62% │ │ PWR 210/250W (84%) VRAM 14.5/16.0GB 90% │
86
+ │ │ │ │
87
+ │ GFX ▁▂▂▃▄▄▅▆▆▇█▇▆▅▄▃▂▁ │ │ GFX ▂▃▄▅▆▇██▇▆▅▄▂▂▃▅▆ │
88
+ │ PWR ▁▁▂▂▃▄▄▅▆▇██▇▆▅▄▂▁ │ │ PWR ▂▂▃▄▅▆▇██▇▆▅▄▃▂▂▃ │
89
+ │ VRM ▁▁▂▂▃▄▄▅▆▇███▇▆▅▄▂ │ │ VRM ▂▃▄▅▆▆▇███▇▆▅▄▃▂▂▃ │
90
+ └──────────────────────────────────────────┘ └──────────────────────────────────────────┘
91
+ ```
92
+
93
+ Key bindings:
94
+ - `q` to quit
95
+
96
+ Common flags:
97
+ - `--update-interval` (seconds between refreshes, default 3)
98
+ - `--history-minutes` (rolling window to retain, default 30)
99
+ - `--static-timeout` / `--metric-timeout` (seconds to wait for `amd-smi` responses)
100
+
101
+ These correspond to the `PicomonConfig` dataclass, so you can also import and reuse
102
+ picomon as a library:
103
+
104
+ ```python
105
+ from picomon import PicomonConfig, run_monitor
106
+
107
+ config = PicomonConfig(update_interval=1.5)
108
+ run_monitor(["--update-interval", str(config.update_interval)])
109
+ ```
110
+
111
+ ## Development
112
+
113
+ - Run tests with `pytest` (see the CI workflow for reference)
114
+ - Use `scripts/publish.py` to cut a GitHub release once tags are in place
115
+
116
+ ## License
117
+
118
+ MIT © [Omar Kamali](https://omarkamali.com)
119
+
120
+ Source: <https://github.com/omarkamali/picomon>
@@ -0,0 +1,17 @@
1
+ LICENSE
2
+ README.md
3
+ pyproject.toml
4
+ src/picomon/__init__.py
5
+ src/picomon/config.py
6
+ src/picomon/history.py
7
+ src/picomon/monitor.py
8
+ src/picomon/smi.py
9
+ src/picomon/ui.py
10
+ src/picomon.egg-info/PKG-INFO
11
+ src/picomon.egg-info/SOURCES.txt
12
+ src/picomon.egg-info/dependency_links.txt
13
+ src/picomon.egg-info/entry_points.txt
14
+ src/picomon.egg-info/requires.txt
15
+ src/picomon.egg-info/top_level.txt
16
+ tests/test_config.py
17
+ tests/test_smi.py
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ picomon = picomon.monitor:run
@@ -0,0 +1,3 @@
1
+
2
+ [dev]
3
+ pytest>=7
@@ -0,0 +1 @@
1
+ picomon
@@ -0,0 +1,22 @@
1
+ import pytest
2
+
3
+ from picomon.config import PicomonConfig
4
+
5
+
6
def test_max_points_derived_from_history_and_interval():
    # One minute of history sampled every 2 s retains 30 points.
    assert PicomonConfig(update_interval=2.0, history_minutes=1).max_points == 30
9
+
10
+
11
@pytest.mark.parametrize(
    "kwargs",
    [
        {field: 0}
        for field in (
            "update_interval",
            "history_minutes",
            "static_timeout",
            "metric_timeout",
        )
    ],
)
def test_invalid_values_raise_value_error(kwargs):
    # Every timing/history knob must reject non-positive values.
    with pytest.raises(ValueError):
        PicomonConfig(**kwargs)
@@ -0,0 +1,83 @@
1
+ import json
2
+ from datetime import datetime
3
+
4
+ from picomon.config import PicomonConfig
5
+ from picomon.smi import load_static_info, update_dynamic_info
6
+
7
+
8
def _runner_with_payload(payload: dict[str, object]):
    """Build a fake CommandRunner whose stdout is *payload* encoded as JSON."""

    encoded = json.dumps(payload)

    def _runner(args, *, timeout):  # type: ignore[override]
        # The command line is ignored on purpose; tests only care about output.
        return encoded

    return _runner
15
+
16
+
17
def test_load_static_info_populates_static_fields():
    config = PicomonConfig(update_interval=1.0, history_minutes=1)
    payload = {
        "gpu_data": [
            {
                "gpu": 0,
                "vram": {"size": {"value": 16384, "unit": "MB"}},
                "limit": {"socket_power": {"value": 250, "unit": "W"}},
            }
        ]
    }

    gpus = load_static_info(config, runner=_runner_with_payload(payload))

    # The single advertised GPU gets a history seeded with its static limits.
    assert 0 in gpus
    hist = gpus[0]
    assert hist.vram_total_mb == 16384
    assert hist.power_limit_w == 250
37
+
38
+
39
def test_update_dynamic_info_appends_samples():
    config = PicomonConfig(update_interval=1.0, history_minutes=1)
    static_payload = {
        "gpu_data": [
            {
                "gpu": 0,
                "vram": {"size": {"value": 16384, "unit": "MB"}},
                "limit": {"socket_power": {"value": 250, "unit": "W"}},
            }
        ]
    }
    gpus = load_static_info(config, runner=_runner_with_payload(static_payload))

    metric_payload = {
        "gpu_data": [
            {
                "gpu": 0,
                "usage": {"gfx_activity": {"value": 40, "unit": "%"}},
                "power": {"socket_power": {"value": 120, "unit": "W"}},
                "mem_usage": {
                    "used_visible_vram": {"value": 8192, "unit": "MB"},
                    "total_visible_vram": {"value": 16384, "unit": "MB"},
                },
            }
        ]
    }
    frozen_now = datetime(2024, 1, 1, 12, 0, 0)

    update_dynamic_info(
        config,
        gpus,
        runner=_runner_with_payload(metric_payload),
        timestamp_provider=lambda: frozen_now,
    )

    # One sample was appended with the injected timestamp and parsed values.
    hist = gpus[0]
    assert hist.timestamps[-1] == frozen_now
    assert hist.gfx[-1] == 40
    assert hist.power_w[-1] == 120
    assert hist.vram_used_mb[-1] == 8192
    assert hist.vram_total_mb == 16384