microchip-devtools 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- microchip_devtools/__init__.py +0 -0
- microchip_devtools/_project.py +14 -0
- microchip_devtools/format/__init__.py +0 -0
- microchip_devtools/format/uncrustify.py +119 -0
- microchip_devtools/list_cmds.py +31 -0
- microchip_devtools/mcc/__init__.py +0 -0
- microchip_devtools/mcc/check_peripheral.py +207 -0
- microchip_devtools/mcc/mcc_refresh.py +343 -0
- microchip_devtools/mcc/parse_hardware.py +374 -0
- microchip_devtools/setup_env/__init__.py +0 -0
- microchip_devtools/setup_env/_ui.py +63 -0
- microchip_devtools/setup_env/checks.py +178 -0
- microchip_devtools/setup_env/defaults.py +33 -0
- microchip_devtools/setup_env/runner.py +334 -0
- microchip_devtools/xc32/__init__.py +0 -0
- microchip_devtools/xc32/merge_hex.py +238 -0
- microchip_devtools/xc32/validate_fmt3.py +230 -0
- microchip_devtools-0.1.0.dist-info/METADATA +16 -0
- microchip_devtools-0.1.0.dist-info/RECORD +21 -0
- microchip_devtools-0.1.0.dist-info/WHEEL +4 -0
- microchip_devtools-0.1.0.dist-info/entry_points.txt +10 -0
|
@@ -0,0 +1,374 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
microchip_devtools.mcc.parse_hardware — Parse Harmony component YML and report hardware config.
|
|
4
|
+
|
|
5
|
+
Usage:
|
|
6
|
+
mchp-parse-hardware [--root PATH] [--project-name NAME]
|
|
7
|
+
mchp-parse-hardware --components-dir PATH [--format json] [--output FILE]
|
|
8
|
+
|
|
9
|
+
Exit codes:
|
|
10
|
+
0 — success
|
|
11
|
+
1 — input/configuration error
|
|
12
|
+
2 — parse error in at least one YML file
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
import argparse
|
|
18
|
+
import json
|
|
19
|
+
import re
|
|
20
|
+
import sys
|
|
21
|
+
from dataclasses import dataclass
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
from typing import Any
|
|
24
|
+
|
|
25
|
+
import yaml
|
|
26
|
+
|
|
27
|
+
from microchip_devtools._project import project_name as _env_project_name
|
|
28
|
+
from microchip_devtools._project import project_root as _env_project_root
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass(frozen=True)
class SymbolRecord:
    """One configuration symbol extracted from a Harmony component YML file."""

    # Owning component name (from 'componentName', falling back to the file stem).
    component: str
    # Symbol key as it appears under data.symbols in the YML.
    symbol: str
    # Raw string value from the first entry of the symbol's 'Values' child ("" if absent).
    value_raw: str
    # Type tag of that first 'Values' entry ("Unknown" if absent).
    source_kind: str
    # The symbol node's own 'type' attribute ("Unknown" if absent).
    symbol_type: str
    # Source file path, relative to the project root when possible.
    file_path: str
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _err(msg: str) -> None:
|
|
42
|
+
print(f"[ERROR] {msg}", file=sys.stderr)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _info(msg: str) -> None:
|
|
46
|
+
print(f"[INFO] {msg}")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _normalize_bool(value: str) -> bool | None:
|
|
50
|
+
lowered = value.strip().lower()
|
|
51
|
+
if lowered in {"true", "1", "yes", "on"}:
|
|
52
|
+
return True
|
|
53
|
+
if lowered in {"false", "0", "no", "off"}:
|
|
54
|
+
return False
|
|
55
|
+
return None
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _extract_value_meta(symbol_node: dict[str, Any]) -> tuple[str, str]:
|
|
59
|
+
children = symbol_node.get("children", [])
|
|
60
|
+
if not isinstance(children, list):
|
|
61
|
+
return "", "Unknown"
|
|
62
|
+
|
|
63
|
+
for child in children:
|
|
64
|
+
if not isinstance(child, dict):
|
|
65
|
+
continue
|
|
66
|
+
if child.get("type") != "Values":
|
|
67
|
+
continue
|
|
68
|
+
entries = child.get("children", [])
|
|
69
|
+
if not isinstance(entries, list) or not entries:
|
|
70
|
+
return "", "Unknown"
|
|
71
|
+
first = entries[0]
|
|
72
|
+
if not isinstance(first, dict):
|
|
73
|
+
return "", "Unknown"
|
|
74
|
+
attrs = first.get("attributes", {})
|
|
75
|
+
if not isinstance(attrs, dict):
|
|
76
|
+
return "", "Unknown"
|
|
77
|
+
return str(attrs.get("value", "")), str(first.get("type", "Unknown"))
|
|
78
|
+
|
|
79
|
+
return "", "Unknown"
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _load_component_file(path: Path, root: Path) -> tuple[str, list[SymbolRecord]]:
    """Parse one Harmony component YML file into (component name, symbol records).

    Raises OSError / yaml.YAMLError on unreadable input and ValueError when
    the top-level YAML node is not a mapping.
    """
    payload = yaml.safe_load(path.read_text(encoding="utf-8"))
    if not isinstance(payload, dict):
        raise ValueError("Top-level YAML node is not a mapping")

    component = str(payload.get("componentName", path.stem))
    data = payload.get("data", {})
    symbols = data.get("symbols", {}) if isinstance(data, dict) else {}
    if not isinstance(symbols, dict):
        return component, []

    # The relative path is identical for every record of this file, so
    # compute it once up front.
    try:
        rel_path = str(path.relative_to(root))
    except ValueError:
        rel_path = str(path)

    records: list[SymbolRecord] = []
    for name, node in symbols.items():
        if not isinstance(node, dict):
            continue
        value_raw, source_kind = _extract_value_meta(node)
        records.append(
            SymbolRecord(
                component=component,
                symbol=str(name),
                value_raw=value_raw,
                source_kind=source_kind,
                symbol_type=str(node.get("type", "Unknown")),
                file_path=rel_path,
            )
        )
    return component, records
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def _find_all_yml_files(components_dir: Path) -> list[Path]:
|
|
119
|
+
return sorted(p for p in components_dir.rglob("*.yml") if p.is_file())
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def _collect_records(
    components_dir: Path, root: Path
) -> tuple[list[Path], dict[str, list[SymbolRecord]], list[str]]:
    """Load every YML file under components_dir.

    Returns (scanned files, records grouped by component, parse-failure
    messages). A failing file is reported but does not stop the scan.
    """
    yml_files = _find_all_yml_files(components_dir)
    grouped: dict[str, list[SymbolRecord]] = {}
    failures: list[str] = []

    for yml in yml_files:
        try:
            name, recs = _load_component_file(yml, root)
        except (yaml.YAMLError, OSError, ValueError) as exc:
            # Report paths relative to the root when possible for readability.
            try:
                shown = str(yml.relative_to(root))
            except ValueError:
                shown = str(yml)
            failures.append(f"{shown}: {exc}")
        else:
            grouped.setdefault(name, []).extend(recs)

    return yml_files, grouped, failures
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def _all_records(by_component: dict[str, list[SymbolRecord]]) -> list[SymbolRecord]:
|
|
145
|
+
items: list[SymbolRecord] = []
|
|
146
|
+
for records in by_component.values():
|
|
147
|
+
items.extend(records)
|
|
148
|
+
return items
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def _is_clock_symbol(symbol: str) -> bool:
|
|
152
|
+
return bool(re.search(r"(CLOCK|FREQ|PLL|OSC|CLKSEL)", symbol, re.IGNORECASE))
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def _is_interrupt_symbol(symbol: str) -> bool:
|
|
156
|
+
return bool(re.search(r"(INTERRUPT|IRQ|IEC)", symbol, re.IGNORECASE))
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _is_comm_timing_symbol(symbol: str) -> bool:
|
|
160
|
+
return bool(re.search(r"(BAUD|BRG|BITRATE|TSEG|SJW|TIMER_PERIOD|TIME_PERIOD_MS)", symbol, re.IGNORECASE))
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def _is_adc_symbol(component: str, symbol: str) -> bool:
|
|
164
|
+
if component.lower().startswith("adch"):
|
|
165
|
+
return True
|
|
166
|
+
return bool(re.search(r"ADCHS|ADC", symbol, re.IGNORECASE))
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _build_component_enable_state(records: list[SymbolRecord]) -> str:
    """Classify a component as "enabled", "disabled", or "unknown".

    Explicit *ENABLE* boolean symbols win: any True means enabled, all False
    means disabled. Otherwise clock/period symbols are used as a heuristic:
    all zero/empty means disabled, any non-zero means enabled.
    """
    flags = [
        flag
        for rec in records
        if "ENABLE" in rec.symbol.upper()
        and (flag := _normalize_bool(rec.value_raw)) is not None
    ]
    if flags:
        return "enabled" if any(flags) else "disabled"

    timing = [
        rec for rec in records
        if re.search(r"CLOCK|FREQ|TIMER_PERIOD", rec.symbol, re.IGNORECASE)
    ]
    if not timing:
        return "unknown"
    zeroish = {"", "0", "0.0"}
    return "disabled" if all(rec.value_raw in zeroish for rec in timing) else "enabled"
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _extract_pin_map(records: list[SymbolRecord]) -> dict[str, dict[str, str]]:
|
|
193
|
+
pin_data: dict[str, dict[str, str]] = {}
|
|
194
|
+
pattern = re.compile(r"^BSP_PIN_(\d+)_(FUNCTION_NAME|FUNCTION_TYPE|MODE)$")
|
|
195
|
+
for rec in records:
|
|
196
|
+
match = pattern.match(rec.symbol)
|
|
197
|
+
if not match:
|
|
198
|
+
continue
|
|
199
|
+
pin_num = match.group(1)
|
|
200
|
+
field = match.group(2).lower()
|
|
201
|
+
pin_data.setdefault(pin_num, {})[field] = rec.value_raw
|
|
202
|
+
return pin_data
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def _build_report_data(files: list[Path], by_component: dict[str, list[SymbolRecord]], root: Path) -> dict[str, Any]:
    """Assemble the report dict consumed by _format_text / the JSON output.

    Keys: overview, clocks, peripherals, interrupts, pin_map,
    communication_timings, adc.
    """
    all_recs = _all_records(by_component)

    # Symbols whose names look clock-related (CLOCK/FREQ/PLL/OSC/CLKSEL).
    clocks = [
        {"component": rec.component, "symbol": rec.symbol, "value": rec.value_raw, "source": rec.source_kind}
        for rec in all_recs if _is_clock_symbol(rec.symbol)
    ]

    # Baud rates, CAN bit timing, timer periods, and similar timing symbols.
    comm_timing = [
        {"component": rec.component, "symbol": rec.symbol, "value": rec.value_raw, "source": rec.source_kind}
        for rec in all_recs if _is_comm_timing_symbol(rec.symbol)
    ]

    # Per-component enabled/disabled/unknown classification plus the number
    # of distinct source files that contributed records.
    component_status = [
        {"component": name, "status": _build_component_enable_state(records), "file_count": len({rec.file_path for rec in records})}
        for name, records in sorted(by_component.items())
    ]

    # Aggregate interrupt enables per component: enabled if any boolean
    # interrupt symbol is true, disabled if all are false, unknown when the
    # component has no boolean interrupt symbols at all.
    interrupt_status = []
    for name, records in sorted(by_component.items()):
        values = []
        for rec in records:
            if not _is_interrupt_symbol(rec.symbol):
                continue
            normalized = _normalize_bool(rec.value_raw)
            if normalized is None:
                continue
            values.append(normalized)
        state = "enabled" if values and any(values) else ("disabled" if values else "unknown")
        interrupt_status.append({"component": name, "interrupts": state})

    # Pin mapping comes only from the 'core' component's BSP_PIN_* symbols.
    core_records = by_component.get("core", [])
    pin_map = _extract_pin_map(core_records)

    adc = [
        {"component": rec.component, "symbol": rec.symbol, "value": rec.value_raw, "source": rec.source_kind}
        for rec in all_recs if _is_adc_symbol(rec.component, rec.symbol)
    ]

    # Show file paths relative to the root when possible; if any file lies
    # outside the root, fall back to absolute paths for the whole list.
    try:
        file_paths = [str(p.relative_to(root)) for p in files]
    except ValueError:
        file_paths = [str(p) for p in files]

    return {
        "overview": {
            "files_scanned": len(files),
            "components_found": len(by_component),
            "symbols_scanned": len(all_recs),
            "component_files": file_paths,
        },
        "clocks": clocks,
        "peripherals": component_status,
        "interrupts": interrupt_status,
        "pin_map": pin_map,
        "communication_timings": comm_timing,
        "adc": adc,
    }
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
def _format_text(report: dict[str, Any], max_items: int) -> str:
|
|
266
|
+
out: list[str] = []
|
|
267
|
+
|
|
268
|
+
overview = report["overview"]
|
|
269
|
+
out.append("== Hardware Configuration Report ==")
|
|
270
|
+
out.append(f"Files scanned: {overview['files_scanned']}")
|
|
271
|
+
out.append(f"Components found: {overview['components_found']}")
|
|
272
|
+
out.append(f"Symbols scanned: {overview['symbols_scanned']}")
|
|
273
|
+
out.append("")
|
|
274
|
+
|
|
275
|
+
out.append("== Enabled Peripherals ==")
|
|
276
|
+
for item in report["peripherals"]:
|
|
277
|
+
out.append(f"- {item['component']}: {item['status']}")
|
|
278
|
+
out.append("")
|
|
279
|
+
|
|
280
|
+
out.append("== Interrupt Status ==")
|
|
281
|
+
for item in report["interrupts"]:
|
|
282
|
+
out.append(f"- {item['component']}: {item['interrupts']}")
|
|
283
|
+
out.append("")
|
|
284
|
+
|
|
285
|
+
out.append("== Clock-Related Symbols ==")
|
|
286
|
+
for rec in report["clocks"][:max_items]:
|
|
287
|
+
out.append(f"- {rec['component']}.{rec['symbol']} = {rec['value']} (source={rec['source']})")
|
|
288
|
+
if len(report["clocks"]) > max_items:
|
|
289
|
+
out.append(f"... {len(report['clocks']) - max_items} more clock entries")
|
|
290
|
+
out.append("")
|
|
291
|
+
|
|
292
|
+
out.append("== Communication Timing Symbols ==")
|
|
293
|
+
for rec in report["communication_timings"][:max_items]:
|
|
294
|
+
out.append(f"- {rec['component']}.{rec['symbol']} = {rec['value']} (source={rec['source']})")
|
|
295
|
+
if len(report["communication_timings"]) > max_items:
|
|
296
|
+
out.append(f"... {len(report['communication_timings']) - max_items} more timing entries")
|
|
297
|
+
out.append("")
|
|
298
|
+
|
|
299
|
+
out.append("== Pin Mapping (core) ==")
|
|
300
|
+
pin_items = sorted(report["pin_map"].items(), key=lambda x: int(x[0]))
|
|
301
|
+
for pin, fields in pin_items[:max_items]:
|
|
302
|
+
name = fields.get("function_name", "")
|
|
303
|
+
ptype = fields.get("function_type", "")
|
|
304
|
+
mode = fields.get("mode", "")
|
|
305
|
+
out.append(f"- PIN {pin}: name={name}, function={ptype}, mode={mode}")
|
|
306
|
+
if len(pin_items) > max_items:
|
|
307
|
+
out.append(f"... {len(pin_items) - max_items} more pin entries")
|
|
308
|
+
out.append("")
|
|
309
|
+
|
|
310
|
+
out.append("== ADC Symbols ==")
|
|
311
|
+
for rec in report["adc"][:max_items]:
|
|
312
|
+
out.append(f"- {rec['component']}.{rec['symbol']} = {rec['value']} (source={rec['source']})")
|
|
313
|
+
if len(report["adc"]) > max_items:
|
|
314
|
+
out.append(f"... {len(report['adc']) - max_items} more ADC entries")
|
|
315
|
+
|
|
316
|
+
return "\n".join(out)
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
def main(argv: list[str] | None = None) -> int:
    """CLI entry point: locate the components dir, parse it, and emit the report.

    Returns 0 on success, 1 for input/configuration errors, and 2 when at
    least one YML file fails to parse.
    """
    parser = argparse.ArgumentParser(
        description="Parse Harmony component YML files and report hardware configuration."
    )
    parser.add_argument("--root", type=Path, default=None,
                        help="Project root (default: $VOLTU_PROJECT_ROOT or cwd)")
    parser.add_argument("--project-name", default=None,
                        help="Project name (default: $VOLTU_PROJECT_NAME or folder name)")
    parser.add_argument("--components-dir", type=Path, default=None,
                        help="Override components directory path")
    parser.add_argument("--format", choices=["text", "json"], default="text")
    parser.add_argument("--output", type=Path)
    parser.add_argument("--max-items", type=int, default=100)
    args = parser.parse_args(argv)

    project_root = args.root or _env_project_root()
    project = args.project_name or _env_project_name()

    # Resolve the components directory: an explicit override (anchored at the
    # project root when relative), else the conventional Harmony layout.
    if args.components_dir is None:
        components_dir = project_root / f"firmware/{project}.X/{project}_default/components"
    else:
        components_dir = args.components_dir
        if not components_dir.is_absolute():
            components_dir = project_root / components_dir

    if not (components_dir.exists() and components_dir.is_dir()):
        _err(f"Components directory not found: {components_dir}")
        return 1

    _info(f"Scanning YML files in {components_dir}")
    files, by_component, parse_errors = _collect_records(components_dir, project_root)
    if parse_errors:
        for failure in parse_errors:
            _err(f"Parse failed for {failure}")
        return 2

    report = _build_report_data(files, by_component, project_root)
    rendered = (
        json.dumps(report, indent=2)
        if args.format == "json"
        else _format_text(report, max_items=args.max_items)
    )

    if args.output:
        destination = args.output if args.output.is_absolute() else project_root / args.output
        destination.parent.mkdir(parents=True, exist_ok=True)
        destination.write_text(rendered + "\n", encoding="utf-8")
        _info(f"Report written to {destination}")
    else:
        print(rendered)

    return 0
|
|
371
|
+
|
|
372
|
+
|
|
373
|
+
# Script entry point: propagate main()'s exit code to the shell.
if __name__ == "__main__":
    sys.exit(main())
|
|
File without changes
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"""Terminal output helpers for setup_env — rich-powered."""
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
from rich.console import Console
|
|
7
|
+
from rich.prompt import Confirm, Prompt
|
|
8
|
+
|
|
9
|
+
console = Console(highlight=False)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _pass(label: str) -> None:
    """Render a green PASS line for *label*."""
    line = f" [green]✔ PASS[/green] {label}"
    console.print(line)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _fail(label: str, reason: str) -> None:
    """Render a red FAIL line for *label*, followed by the reason."""
    for line in (f" [red]✗ FAIL[/red] {label}", f" [yellow]→[/yellow] {reason}"):
        console.print(line)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _warn(label: str, reason: str) -> None:
    """Render a yellow WARN line for *label*, followed by the reason."""
    for line in (f" [yellow]⚠ WARN[/yellow] {label}", f" [yellow]→[/yellow] {reason}"):
        console.print(line)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _is_interactive() -> bool:
|
|
27
|
+
return sys.stdin.isatty()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def prompt_path(label: str, env_key: str) -> str | None:
    """Interactively ask the user for a replacement path.

    Returns None in non-interactive sessions, on empty input, or when the
    prompt is aborted with EOF / Ctrl-C; otherwise the stripped path string.
    """
    if not _is_interactive():
        return None

    console.print()
    console.print(f" [yellow]?[/yellow] [bold]{label}[/bold] was not found at the expected location.")
    console.print(f" You can set [bold]{env_key}[/bold] in your shell or .env file.")

    try:
        raw = Prompt.ask(
            " Enter the correct path (or press Enter to skip)",
            default="",
            console=console,
        )
    except (EOFError, KeyboardInterrupt):
        console.print()
        return None

    cleaned = raw.strip()
    return cleaned or None
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def offer_save_to_env(key: str, value: str, env_file: Path) -> None:
    """Offer to persist ``key=value`` into the given .env file.

    No-op in non-interactive sessions or when the prompt is aborted with
    EOF / Ctrl-C. On confirmation, appends to env_file (created if missing).
    """
    if not _is_interactive():
        return
    try:
        save = Confirm.ask(
            f" Save {key}={value} to .env for future runs?",
            default=False,
            console=console,
        )
    except (EOFError, KeyboardInterrupt):
        console.print()
        return
    if save:
        # Explicit UTF-8 so the write does not depend on the locale encoding.
        with env_file.open("a", encoding="utf-8") as f:
            f.write(f"\n{key}={value}\n")
        console.print(" [green]Saved.[/green] You can edit .env at any time to update it.")
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
"""Individual prerequisite check functions for Voltu firmware projects."""
|
|
2
|
+
|
|
3
|
+
import shutil
|
|
4
|
+
import subprocess
|
|
5
|
+
import sys
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from microchip_devtools.setup_env._ui import (
|
|
9
|
+
_fail, _pass, _warn, offer_save_to_env, prompt_path,
|
|
10
|
+
)
|
|
11
|
+
from microchip_devtools.setup_env.defaults import PROGRAMMER_VALUES
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def check_python() -> bool:
    """Check that the running interpreter is Python 3.10 or newer."""
    found = sys.version.split()[0]
    if sys.version_info < (3, 10):
        _fail("Python >= 3.10", f"Found {found}. Install Python 3.10 or newer.")
        return False
    _pass(f"Python >= 3.10 (found {found})")
    return True
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def check_poetry() -> bool:
    """Check that Poetry is on PATH and report its version."""
    if shutil.which("poetry") is None:
        _fail("Poetry", "Not found on PATH. Install from https://python-poetry.org/docs/")
        return False
    try:
        proc = subprocess.run(
            ["poetry", "--version"], capture_output=True, text=True, timeout=10
        )
        banner = proc.stdout.strip() or proc.stderr.strip()
        _pass(f"Poetry ({banner})")
        return True
    except Exception as exc:
        _fail("Poetry", str(exc))
        return False
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def check_make() -> bool:
    """Check that GNU Make is on PATH and report its version banner."""
    if shutil.which("make") is None:
        _fail("GNU Make", "Not found on PATH. Install with: sudo apt install make")
        return False
    try:
        proc = subprocess.run(
            ["make", "--version"], capture_output=True, text=True, timeout=10
        )
        banner = proc.stdout.splitlines()[0] if proc.stdout else "make"
        _pass(f"GNU Make ({banner})")
        return True
    except Exception as exc:
        _fail("GNU Make", str(exc))
        return False
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def check_cppcheck() -> bool:
    """Check that cppcheck is on PATH and report its version."""
    if shutil.which("cppcheck") is None:
        _fail("cppcheck", "Not found on PATH. Install with: sudo apt install cppcheck")
        return False
    try:
        proc = subprocess.run(
            ["cppcheck", "--version"], capture_output=True, text=True, timeout=10
        )
        banner = proc.stdout.strip() or proc.stderr.strip()
        _pass(f"cppcheck ({banner})")
        return True
    except Exception as exc:
        _fail("cppcheck", str(exc))
        return False
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def check_uncrustify() -> bool:
    """Check that uncrustify is on PATH and report its version."""
    if shutil.which("uncrustify") is None:
        _fail("uncrustify", "Not found on PATH. Install with: sudo apt install uncrustify")
        return False
    try:
        proc = subprocess.run(
            ["uncrustify", "--version"], capture_output=True, text=True, timeout=10
        )
        banner = proc.stdout.strip() or proc.stderr.strip()
        _pass(f"uncrustify ({banner})")
        return True
    except Exception as exc:
        _fail("uncrustify", str(exc))
        return False
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def check_xc32(xc32_path: str, env_file: Path) -> bool:
    """Verify the XC32 toolchain by running xc32-gcc --version.

    When the configured path fails, the user is prompted interactively for a
    replacement, which is offered for saving to .env on success.
    """
    def _try_path(path: str) -> bool:
        # expanduser() so "~"-style paths work: bare Path.exists() does not
        # expand "~" (the package's COMMON_DEFAULTS ship such a path for DFP,
        # and users may configure XC32_PATH the same way).
        binary = Path(path).expanduser() / "xc32-gcc"
        if not binary.exists():
            return False
        try:
            result = subprocess.run(
                [str(binary), "--version"], capture_output=True, text=True, timeout=10
            )
            version_line = (result.stdout or result.stderr).splitlines()[0]
            _pass(f"XC32 Toolchain ({version_line})")
            return True
        except Exception:
            # Any failure (timeout, empty output, exec error) counts as "not usable".
            return False

    if _try_path(xc32_path):
        return True

    _fail(
        "XC32 Toolchain",
        f"xc32-gcc not found at: {xc32_path}\n"
        " Download from https://www.microchip.com/en-us/tools-resources/develop/mplab-xc-compilers",
    )
    new_path = prompt_path("XC32 Toolchain (bin/ directory)", "XC32_PATH")
    if new_path and _try_path(new_path):
        offer_save_to_env("XC32_PATH", new_path, env_file)
        return True
    return False
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def check_dfp(dfp_path: str, env_file: Path) -> bool:
    """Verify the Device Family Pack directory exists; prompt for a fix otherwise."""
    def _try_path(path: str) -> bool:
        # Bug fix: the shipped default DFP_PATH starts with "~" (see
        # COMMON_DEFAULTS) and Path.is_dir() does not expand it, so the
        # default could never pass without expanduser().
        if Path(path).expanduser().is_dir():
            _pass(f"Device Family Pack ({path})")
            return True
        return False

    if _try_path(dfp_path):
        return True

    _fail(
        "Device Family Pack (DFP)",
        f"Directory not found: {dfp_path}\n"
        " Install via MPLAB X → Tools → Packs,\n"
        " or download from https://packs.download.microchip.com/",
    )
    new_path = prompt_path("Device Family Pack root directory", "DFP_PATH")
    if new_path and _try_path(new_path):
        offer_save_to_env("DFP_PATH", new_path, env_file)
        return True
    return False
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def check_programmer(value: str) -> bool:
    """Validate the PROGRAMMER short code against the known value table."""
    full_name = PROGRAMMER_VALUES.get(value)
    if full_name is not None:
        _pass(f"Programmer ({value} → {full_name})")
        return True
    options = " ".join(sorted(PROGRAMMER_VALUES))
    _fail(
        "Programmer",
        f"Unknown value: {value!r}\n Valid options: {options}",
    )
    return False
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def check_boot_hex(boot_hex: str, env_file: Path) -> bool:
    """Check the bootloader .hex file exists (warn-only; prompts for a fix)."""
    def _try_path(path: str) -> bool:
        # expanduser() so "~"-style BOOT_HEX values work — Path.is_file()
        # does not expand "~" (same fix as the DFP check).
        if Path(path).expanduser().is_file():
            _pass(f"Boot HEX ({path})")
            return True
        return False

    if _try_path(boot_hex):
        return True

    # Missing boot hex is a warning, not a failure: it only blocks the
    # 'flash-with-boot' target.
    _warn(
        "Boot HEX",
        f"File not found: {boot_hex}\n"
        " Required for the 'flash-with-boot' target. Build the bootloader first,\n"
        " or set BOOT_HEX in .env to point to an existing file.",
    )
    new_path = prompt_path("Bootloader .hex file", "BOOT_HEX")
    if new_path and _try_path(new_path):
        offer_save_to_env("BOOT_HEX", new_path, env_file)
        return True
    return False
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
"""Common environment variable defaults shared across all Voltu firmware projects.
|
|
2
|
+
|
|
3
|
+
Projects add project-specific vars in their own pymake/env.py as PROJECT_DEFAULTS.
|
|
4
|
+
Resolution priority (highest to lowest):
|
|
5
|
+
1. Shell environment variables
|
|
6
|
+
2. .env file
|
|
7
|
+
3. PROJECT_DEFAULTS (project pymake/env.py)
|
|
8
|
+
4. COMMON_DEFAULTS (this file)
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
# Map of short programmer codes (as accepted by check_programmer / the
# PROGRAMMER variable) to full tool names. NOTE(review): the full names are
# presumably the identifiers expected by the MPLAB programming tooling —
# confirm against the ipecmd documentation before adding entries.
PROGRAMMER_VALUES: dict[str, str] = {
    "RICE": "REALICE",
    "ICD3": "ICD3",
    "PK3": "PICKIT3",
    "PM3": "PM3",
    "ICD4": "ICD4",
    "ICD5": "ICD5",
    "ICE4": "ICE4",
    "PK4": "PICKIT4",
    "PK5": "PICKIT5",
    "SNAP": "SNAP",
    "PKOB": "PKOB3",
    "PKOB4": "PKOB4",
    "PKBASIC": "PICKITBASIC",
    "J32": "J-32",
}
|
|
27
|
+
|
|
28
|
+
# Lowest-priority defaults; see the module docstring for the resolution order.
COMMON_DEFAULTS: dict[str, str] = {
    # bin/ directory of the XC32 compiler install.
    "XC32_PATH": "/opt/microchip/xc32/v4.60/bin/",
    # NOTE(review): contains "~" — consumers must expanduser() this value
    # before touching the filesystem; a plain Path.is_dir() will not match.
    "DFP_PATH": "~/.mchp_packs/Microchip/PIC32MK-MC_DFP/1.12.263",
    # MPLAB IPE command-line launcher script.
    "IPE_CMD": "/opt/microchip/mplabx/v6.25/mplab_platform/mplab_ipe/ipecmd.sh",
    # Short programmer code; must be a key of PROGRAMMER_VALUES.
    "PROGRAMMER": "PK5",
}
|