napt-0.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- napt/__init__.py +91 -0
- napt/build/__init__.py +47 -0
- napt/build/manager.py +1087 -0
- napt/build/packager.py +315 -0
- napt/build/template.py +301 -0
- napt/cli.py +602 -0
- napt/config/__init__.py +42 -0
- napt/config/loader.py +465 -0
- napt/core.py +385 -0
- napt/detection.py +630 -0
- napt/discovery/__init__.py +86 -0
- napt/discovery/api_github.py +445 -0
- napt/discovery/api_json.py +452 -0
- napt/discovery/base.py +244 -0
- napt/discovery/url_download.py +304 -0
- napt/discovery/web_scrape.py +467 -0
- napt/exceptions.py +149 -0
- napt/io/__init__.py +42 -0
- napt/io/download.py +357 -0
- napt/io/upload.py +37 -0
- napt/logging.py +230 -0
- napt/policy/__init__.py +50 -0
- napt/policy/updates.py +126 -0
- napt/psadt/__init__.py +43 -0
- napt/psadt/release.py +309 -0
- napt/requirements.py +566 -0
- napt/results.py +143 -0
- napt/state/__init__.py +58 -0
- napt/state/tracker.py +371 -0
- napt/validation.py +467 -0
- napt/versioning/__init__.py +115 -0
- napt/versioning/keys.py +309 -0
- napt/versioning/msi.py +725 -0
- napt-0.3.1.dist-info/METADATA +114 -0
- napt-0.3.1.dist-info/RECORD +38 -0
- napt-0.3.1.dist-info/WHEEL +4 -0
- napt-0.3.1.dist-info/entry_points.txt +3 -0
- napt-0.3.1.dist-info/licenses/LICENSE +202 -0
napt/config/loader.py
ADDED
@@ -0,0 +1,465 @@
# Copyright 2025 Roger Cibrian
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Configuration loading and merging for NAPT.

This module implements a three-layer configuration system that allows
organization-wide defaults to be overridden by vendor-specific settings and
finally by recipe-specific configuration. This design promotes DRY
(Don't Repeat Yourself) principles and makes recipes easier to maintain.

Configuration Layers:
    1. **Organization defaults** (defaults/org.yaml)
       - Base configuration for all apps
       - Defines PSADT settings, update policies, deployment waves, etc.
       - Required if a defaults directory is found

    2. **Vendor defaults** (defaults/vendors/<Vendor>.yaml)
       - Vendor-specific overrides (e.g., Google-specific settings)
       - Optional; only loaded if a vendor is detected
       - Overrides organization defaults

    3. **Recipe configuration** (recipes/<Vendor>/<app>.yaml)
       - App-specific configuration
       - Always required; defines the app itself
       - Overrides vendor and organization defaults

Merge Behavior:
    The loader performs deep merging with "last wins" semantics:

    - **Dicts**: Recursively merged (keys from the overlay override the base)
    - **Lists**: Completely replaced (NOT appended/extended)
    - **Scalars**: Overwritten (strings, numbers, booleans)

Path Resolution:
    Relative paths in configuration are resolved against the defaults root
    (when one is found) or the recipe file location, making recipes
    relocatable and portable. Currently resolved paths:

    - defaults.psadt.brand_pack.path

Dynamic Injection:
    Some fields are injected at load time:

    - defaults.psadt.app_vars.AppScriptDate: Today's date (YYYY-MM-DD)

Error Handling:
    - ConfigError: Recipe file doesn't exist, YAML parse errors, empty files,
      or invalid structure
    - All errors are chained with "from err" for better debugging

Example:
    Basic usage:
    ```python
    from pathlib import Path
    from napt.config import load_effective_config

    cfg = load_effective_config(Path("recipes/Google/chrome.yaml"))
    print(cfg["app"]["name"])  # Output: Google Chrome
    ```

    Access merged defaults:
    ```python
    psadt_release = cfg["defaults"]["psadt"]["release"]
    print(psadt_release)  # Output: latest
    ```

    Override vendor detection:
    ```python
    cfg = load_effective_config(
        Path("recipes/Google/chrome.yaml"),
        vendor="CustomVendor"
    )
    ```

Note:
    - The loader walks upward from the recipe to find defaults/org.yaml
    - Vendor is detected from the directory name (recipes/Google/) or recipe content
    - Paths are resolved relative to the recipe tree, not the working directory
    - Dynamic fields are best-effort (warnings on failure, not errors)
"""

from __future__ import annotations

from dataclasses import dataclass
from datetime import date
from pathlib import Path
from typing import Any

import yaml

from napt.exceptions import ConfigError

# -------------------------------
# Data types
# -------------------------------


@dataclass(frozen=True)
class LoadContext:
    """Metadata describing how the config was resolved.

    Useful for debugging and logging.
    """

    recipe_path: Path
    defaults_root: Path | None
    vendor_name: str | None
    org_defaults_path: Path | None
    vendor_defaults_path: Path | None


# -------------------------------
# YAML helpers
# -------------------------------


def _load_yaml_file(p: Path) -> Any:
    """Loads a YAML file and returns the parsed Python object.

    Args:
        p: Path to the YAML file to load.

    Returns:
        The parsed Python object from the YAML file.

    Raises:
        ConfigError: If the file does not exist, contains invalid YAML, or is empty.
    """
    if not p.exists():
        raise ConfigError(f"file not found: {p}")
    try:
        with p.open("r", encoding="utf-8") as f:
            data = yaml.safe_load(f)
    except yaml.YAMLError as err:
        raise ConfigError(f"Error parsing YAML: {p}: {err}") from err
    if data is None:
        raise ConfigError(f"YAML file is empty: {p}")
    return data


# -------------------------------
# Merge logic
# -------------------------------


def _deep_merge_dicts(base: dict[str, Any], overlay: dict[str, Any]) -> dict[str, Any]:
    """Deep-merges two dicts with "overlay wins" semantics.

    Merge behavior:

    - dict + dict -> deep merge
    - list + list -> overlay REPLACES base (not concatenated)
    - everything else -> overlay overwrites base

    This function does not mutate its inputs; it returns a new dict.

    Args:
        base: The base dictionary.
        overlay: The overlay dictionary that takes precedence.

    Returns:
        A new dictionary with the merged contents.
    """
    result: dict[str, Any] = dict(base)
    for k, v in overlay.items():
        if k in result and isinstance(result[k], dict) and isinstance(v, dict):
            result[k] = _deep_merge_dicts(result[k], v)
        else:
            # Replace lists and scalars entirely
            result[k] = v
    return result


# -------------------------------
# Defaults discovery
# -------------------------------


def _find_defaults_root(start_dir: Path) -> Path | None:
    """Walks upward from start_dir looking for a defaults/org.yaml file.

    Args:
        start_dir: The directory to start searching from.

    Returns:
        The defaults/ directory if found, None otherwise.
    """
    for parent in [start_dir] + list(start_dir.parents):
        candidate = parent / "defaults" / "org.yaml"
        if candidate.exists():
            return parent / "defaults"
    return None


def _detect_vendor(recipe_path: Path, recipe_obj: dict[str, Any]) -> str | None:
    """Determines the vendor name for this recipe.

    Uses the following priority order:

    1. Folder name under recipes/ (e.g., recipes/Google/chrome.yaml -> Google)
    2. recipe.app.psadt.app_vars.AppVendor (if present)
    3. None if not found

    Args:
        recipe_path: Path to the recipe file.
        recipe_obj: The parsed recipe dictionary.

    Returns:
        The vendor name if detected, None otherwise.
    """
    # Try the directory name one level up from the recipe file
    parent_name = recipe_path.parent.name or None

    # Try reading from the recipe content
    vendor_from_recipe: str | None = None
    try:
        app = recipe_obj.get("app")
        if app and isinstance(app, dict):
            psadt = app.get("psadt", {})
            app_vars = psadt.get("app_vars", {})
            v = app_vars.get("AppVendor")
            if isinstance(v, str) and v.strip():
                vendor_from_recipe = v
    except Exception:
        vendor_from_recipe = None

    # Prefer the folder name if it exists; else fall back to the recipe
    return parent_name or vendor_from_recipe


# -------------------------------
# Path resolution
# -------------------------------


def _resolve_known_paths(
    cfg: dict[str, Any], recipe_dir: Path, defaults_root: Path | None = None
) -> None:
    """Resolves relative path fields inside the merged config.

    We keep this explicit and conservative to avoid unexpected rewrites.
    Currently handles cfg["defaults"]["psadt"]["brand_pack"]["path"].

    Brand pack paths are resolved relative to defaults_root (if available),
    otherwise relative to recipe_dir as a fallback. Modifies cfg in place.

    Args:
        cfg: The merged configuration dictionary.
        recipe_dir: Directory containing the recipe file.
        defaults_root: The defaults/ directory, if one was found.
    """
    try:
        brand_pack = cfg["defaults"]["psadt"]["brand_pack"]
        raw_path = brand_pack.get("path")
        if isinstance(raw_path, str) and raw_path:
            p = Path(raw_path)
            # Resolve only if the path is relative
            if not p.is_absolute():
                # Resolve relative to defaults_root if available, else recipe_dir
                if defaults_root:
                    brand_pack["path"] = str((defaults_root / p).resolve())
                else:
                    brand_pack["path"] = str((recipe_dir / p).resolve())
    except KeyError:
        # Field missing; nothing to resolve
        pass


# -------------------------------
# Dynamic injection
# -------------------------------


def _inject_dynamic_values(cfg: dict[str, Any]) -> None:
    """Injects dynamic fields that should be set at load/build time.

    Currently injects defaults.psadt.app_vars.AppScriptDate with today's date
    in YYYY-MM-DD format.

    Args:
        cfg: The configuration dictionary to inject values into.
    """
    today_str = date.today().strftime("%Y-%m-%d")
    try:
        app_vars = (
            cfg.setdefault("defaults", {})
            .setdefault("psadt", {})
            .setdefault("app_vars", {})
        )
        # Do not overwrite if explicitly set in a recipe; only set if absent
        app_vars.setdefault("AppScriptDate", today_str)
    except Exception as err:
        # Be defensive but quiet; dynamic injection is best-effort
        from napt.logging import get_global_logger

        logger = get_global_logger()
        logger.warning("CONFIG", f"Could not inject AppScriptDate: {err}")


# -------------------------------
# Verbose helpers
# -------------------------------


def _print_yaml_content(data: dict[str, Any], indent: int = 0) -> None:
    """Print YAML content in a readable format for debug mode."""
    import yaml

    from napt.logging import get_global_logger

    logger = get_global_logger()

    # Convert to a YAML string and log with indentation
    # The logger.debug() call will only print if debug mode is enabled
    yaml_str = yaml.dump(data, default_flow_style=False, sort_keys=False)
    for line in yaml_str.split("\n"):
        if line.strip():  # Skip empty lines
            logger.debug("CONFIG", " " * indent + line)


# -------------------------------
# Public API
# -------------------------------


def load_effective_config(
    recipe_path: Path,
    *,
    vendor: str | None = None,
) -> dict[str, Any]:
    """Loads and merges the effective configuration for a recipe.

    Performs the following operations:

    1. Read recipe YAML
    2. Find the defaults root by scanning upwards for defaults/org.yaml
    3. Load org defaults (required if a defaults root exists)
    4. Determine vendor (param vendor > folder name > recipe contents)
    5. Load vendor defaults if present
    6. Merge: org -> vendor -> recipe (dicts deep-merge, lists replace)
    7. Resolve known relative paths (brand pack path relative to the defaults
       root, else the recipe directory)
    8. Inject dynamic fields (AppScriptDate = today if absent)

    Args:
        recipe_path: Path to the recipe YAML file.
        vendor: Optional vendor name override. If not provided, the vendor is
            detected from the folder name or recipe contents.

    Returns:
        A merged configuration dict ready for downstream processors. If no defaults
        were found in the tree, the recipe is returned as-is (with path resolution
        and dynamic injection still applied).

    Raises:
        ConfigError: On YAML parse errors, empty files, invalid structure, or if
            the recipe file is missing.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    recipe_path = recipe_path.resolve()
    recipe_dir = recipe_path.parent

    logger.verbose("CONFIG", f"Loading recipe: {recipe_path}")

    # 1) Read recipe
    recipe_obj = _load_yaml_file(recipe_path)
    if not isinstance(recipe_obj, dict):
        raise ConfigError(f"top-level YAML must be a mapping (dict): {recipe_path}")

    # 2) Find defaults root
    defaults_root = _find_defaults_root(recipe_dir)
    if defaults_root:
        logger.verbose("CONFIG", f"Found defaults root: {defaults_root}")

    merged: dict[str, Any] = {}
    layers_merged = 0

    org_defaults_path: Path | None = None
    vendor_name: str | None = vendor

    if defaults_root:
        # 3) Load org defaults
        org_defaults_path = defaults_root / "org.yaml"
        if org_defaults_path.exists():
            logger.verbose(
                "CONFIG",
                f"Loading: {org_defaults_path.relative_to(defaults_root.parent)}",
            )
            org_defaults = _load_yaml_file(org_defaults_path)
            if isinstance(org_defaults, dict):
                logger.debug("CONFIG", "--- Content from org.yaml ---")
                _print_yaml_content(org_defaults)
                merged = _deep_merge_dicts(merged, org_defaults)
                layers_merged += 1

        # 4) Determine vendor
        if vendor_name is None:
            vendor_name = _detect_vendor(recipe_path, recipe_obj)

        if vendor_name:
            logger.verbose("CONFIG", f"Detected vendor: {vendor_name}")

        # 5) Load vendor defaults if present
        if vendor_name:
            candidate = defaults_root / "vendors" / f"{vendor_name}.yaml"
            if candidate.exists():
                logger.verbose(
                    "CONFIG", f"Loading: {candidate.relative_to(defaults_root.parent)}"
                )
                vendor_defaults = _load_yaml_file(candidate)
                if isinstance(vendor_defaults, dict):
                    logger.debug("CONFIG", f"--- Content from {vendor_name}.yaml ---")
                    _print_yaml_content(vendor_defaults)
                    merged = _deep_merge_dicts(merged, vendor_defaults)
                    layers_merged += 1

    # Show recipe content
    logger.verbose("CONFIG", f"Loading: {recipe_path.name}")
    logger.debug("CONFIG", f"--- Content from {recipe_path.name} ---")
    _print_yaml_content(recipe_obj)

    # 6) Merge recipe on top
    merged = _deep_merge_dicts(merged, recipe_obj)
    layers_merged += 1

    logger.verbose("CONFIG", f"Deep merging {layers_merged} layer(s)")
    # Show final config structure
    top_level_keys = list(merged.keys())
    logger.verbose(
        "CONFIG",
        (
            f"Final config has {len(top_level_keys)} top-level keys: "
            f"{', '.join(top_level_keys)}"
        ),
    )
    # Show the complete merged configuration in debug mode
    logger.debug("CONFIG", "--- Final Merged Configuration ---")
    _print_yaml_content(merged)

    # 7) Resolve relative paths (branding paths relative to defaults_root)
    _resolve_known_paths(merged, recipe_dir, defaults_root)

    # 8) Inject dynamic values (e.g., AppScriptDate)
    _inject_dynamic_values(merged)

    # Optionally attach context for debugging (commented out by default)
    # merged["_load_context"] = LoadContext(
    #     recipe_path=recipe_path,
    #     defaults_root=defaults_root,
    #     vendor_name=vendor_name,
    #     org_defaults_path=org_defaults_path,
    #     vendor_defaults_path=vendor_defaults_path,
    # ).__dict__

    return merged
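
The merge semantics documented in the module docstring are easiest to see with a concrete pair of layers. The sketch below is illustrative only: the layer contents are hypothetical, and `_deep_merge_dicts` is an internal helper of `napt.config.loader`, not public API.

```python
# Hypothetical org/vendor/recipe layers; only the merge behavior comes from loader.py.
from napt.config.loader import _deep_merge_dicts

org = {"defaults": {"psadt": {"release": "latest"}, "waves": ["pilot", "broad"]}}
vendor = {"defaults": {"psadt": {"release": "4.0.5"}}}   # scalar: overwritten
recipe = {
    "app": {"name": "Google Chrome"},
    "defaults": {"waves": ["pilot"]},                    # list: replaced, not appended
}

merged = _deep_merge_dicts(_deep_merge_dicts(org, vendor), recipe)
assert merged["defaults"]["psadt"]["release"] == "4.0.5"   # overlay wins
assert merged["defaults"]["waves"] == ["pilot"]            # list replaced wholesale
assert merged["app"]["name"] == "Google Chrome"            # new key from the recipe
```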
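End to end, `load_effective_config` is the public entry point. A minimal usage sketch, assuming a hypothetical repository layout (the paths and folder names are examples, not part of the package):

```python
# Assumed layout (hypothetical):
#
#   packaging/
#     defaults/
#       org.yaml                 # organization-wide defaults
#       vendors/Google.yaml      # optional vendor overrides
#     recipes/
#       Google/
#         chrome.yaml            # the recipe
#
from pathlib import Path
from napt.config import load_effective_config

cfg = load_effective_config(Path("packaging/recipes/Google/chrome.yaml"))

# The vendor ("Google") is taken from the recipe's parent folder, org and vendor
# defaults are deep-merged underneath the recipe, and AppScriptDate is injected
# with today's date unless a layer already sets it.
print(cfg["defaults"]["psadt"]["app_vars"]["AppScriptDate"])
```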