napt 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- napt/__init__.py +91 -0
- napt/build/__init__.py +47 -0
- napt/build/manager.py +1087 -0
- napt/build/packager.py +315 -0
- napt/build/template.py +301 -0
- napt/cli.py +602 -0
- napt/config/__init__.py +42 -0
- napt/config/loader.py +465 -0
- napt/core.py +385 -0
- napt/detection.py +630 -0
- napt/discovery/__init__.py +86 -0
- napt/discovery/api_github.py +445 -0
- napt/discovery/api_json.py +452 -0
- napt/discovery/base.py +244 -0
- napt/discovery/url_download.py +304 -0
- napt/discovery/web_scrape.py +467 -0
- napt/exceptions.py +149 -0
- napt/io/__init__.py +42 -0
- napt/io/download.py +357 -0
- napt/io/upload.py +37 -0
- napt/logging.py +230 -0
- napt/policy/__init__.py +50 -0
- napt/policy/updates.py +126 -0
- napt/psadt/__init__.py +43 -0
- napt/psadt/release.py +309 -0
- napt/requirements.py +566 -0
- napt/results.py +143 -0
- napt/state/__init__.py +58 -0
- napt/state/tracker.py +371 -0
- napt/validation.py +467 -0
- napt/versioning/__init__.py +115 -0
- napt/versioning/keys.py +309 -0
- napt/versioning/msi.py +725 -0
- napt-0.3.1.dist-info/METADATA +114 -0
- napt-0.3.1.dist-info/RECORD +38 -0
- napt-0.3.1.dist-info/WHEEL +4 -0
- napt-0.3.1.dist-info/entry_points.txt +3 -0
- napt-0.3.1.dist-info/licenses/LICENSE +202 -0
napt/build/manager.py
ADDED
|
@@ -0,0 +1,1087 @@
|
|
|
1
|
+
# Copyright 2025 Roger Cibrian
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""Build manager for PSADT package creation.
|
|
16
|
+
|
|
17
|
+
This module orchestrates the complete build process for creating PSADT
|
|
18
|
+
packages from recipes and downloaded installers.
|
|
19
|
+
|
|
20
|
+
Design Principles:
|
|
21
|
+
- Filesystem is source of truth for version information
|
|
22
|
+
- Entire PSADT Template_v4 structure copied pristine
|
|
23
|
+
- Invoke-AppDeployToolkit.ps1 is generated from template (not copied)
|
|
24
|
+
- Build directories are versioned: {app_id}/{version}/
|
|
25
|
+
- Branding applied by replacing files in root Assets/ directory (v4 structure)
|
|
26
|
+
|
|
27
|
+
Example:
|
|
28
|
+
Basic usage:
|
|
29
|
+
```python
|
|
30
|
+
from pathlib import Path
|
|
31
|
+
from napt.build import build_package
|
|
32
|
+
|
|
33
|
+
result = build_package(
|
|
34
|
+
recipe_path=Path("recipes/Google/chrome.yaml"),
|
|
35
|
+
downloads_dir=Path("downloads"),
|
|
36
|
+
)
|
|
37
|
+
|
|
38
|
+
print(f"Built: {result.build_dir}")
|
|
39
|
+
```
|
|
40
|
+
"""
|
|
41
|
+
|
|
42
|
+
from __future__ import annotations
|
|
43
|
+
|
|
44
|
+
import json
|
|
45
|
+
from pathlib import Path
|
|
46
|
+
import shutil
|
|
47
|
+
from typing import Any
|
|
48
|
+
|
|
49
|
+
from napt.config import load_effective_config
|
|
50
|
+
from napt.detection import (
|
|
51
|
+
DetectionConfig,
|
|
52
|
+
generate_detection_script,
|
|
53
|
+
sanitize_filename,
|
|
54
|
+
)
|
|
55
|
+
from napt.exceptions import ConfigError, PackagingError
|
|
56
|
+
from napt.psadt import get_psadt_release
|
|
57
|
+
from napt.requirements import (
|
|
58
|
+
RequirementsConfig,
|
|
59
|
+
generate_requirements_script,
|
|
60
|
+
)
|
|
61
|
+
from napt.results import BuildResult
|
|
62
|
+
from napt.versioning.msi import (
|
|
63
|
+
extract_msi_architecture,
|
|
64
|
+
extract_msi_metadata,
|
|
65
|
+
version_from_msi_product_version,
|
|
66
|
+
)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _get_installer_version(
    installer_file: Path, config: dict[str, Any], state_file: Path | None = None
) -> str:
    """Determine the version string for an installer file.

    Resolution order:
    1. For ``.msi`` files, read the version from the MSI ProductVersion.
    2. Fall back to ``known_version`` recorded in the state file.
    3. Raise when neither source yields a version.

    Args:
        installer_file: Path to the installer file.
        config: Recipe configuration.
        state_file: Path to state file for fallback version lookup.

    Returns:
        Extracted version string.

    Raises:
        ConfigError: If version cannot be determined from any source.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    app_id = config["app"].get("id", "unknown")

    # Priority 1: MSI files carry their version in the ProductVersion property.
    if installer_file.suffix.lower() == ".msi":
        logger.verbose(
            "BUILD", f"Auto-detected MSI, extracting version: {installer_file.name}"
        )
        try:
            discovered = version_from_msi_product_version(installer_file)
        except Exception as err:
            # Extraction can fail on unusual MSIs; fall through to the state file.
            logger.verbose(
                "BUILD", f"MSI version extraction failed, trying state file: {err}"
            )
        else:
            logger.verbose("BUILD", f"Extracted version: {discovered.version}")
            return discovered.version

    # Priority 2: version previously discovered and persisted to the state file.
    if state_file and state_file.exists():
        from napt.state import load_state

        logger.verbose("BUILD", "Using version from state file")
        known_version = (
            load_state(state_file)
            .get("apps", {})
            .get(app_id, {})
            .get("known_version")
        )
        if known_version:
            logger.verbose("BUILD", f"Using version from state: {known_version}")
            return known_version

    # Neither source produced a version.
    raise ConfigError(
        f"Could not determine version for {app_id}. Either:\n"
        f" - Use an MSI installer (auto-detected from file extension)\n"
        f" - Run 'napt discover' first to populate state file with version"
    )
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def _find_installer_file(
    downloads_dir: Path, config: dict[str, Any], state_file: Path | None = None
) -> Path:
    """Find the installer file in the downloads directory.

    Uses multiple strategies, in order:
    1. Filename taken from the recipe URL (url_download strategy).
    2. Filename taken from the state-file URL (web_scrape, api_github,
       api_json strategies).
    3. Filename matching against the app id/name; the most recently
       modified match wins.

    Args:
        downloads_dir: Downloads directory to search.
        config: Recipe configuration.
        state_file: Optional state file to check for cached URL.

    Returns:
        Path to the installer file.

    Raises:
        PackagingError: If installer file cannot be found.
    """
    from urllib.parse import urlparse

    from napt.logging import get_global_logger

    logger = get_global_logger()
    app = config["app"]
    app_id = app.get("id", "")
    source = app.get("source", {})
    url = source.get("url", "")

    # Strategy 1: Extract filename from recipe URL (for url_download)
    if url:
        filename = Path(urlparse(url).path).name
        if filename:
            installer_path = downloads_dir / filename

            if installer_path.exists():
                logger.verbose(
                    "BUILD", f"Found installer from recipe URL: {installer_path}"
                )
                return installer_path

    # Strategy 2: Extract filename from state file URL (for web_scrape, etc.)
    if state_file and state_file.exists():
        try:
            from napt.state import load_state

            state = load_state(state_file)
            app_state = state.get("apps", {}).get(app_id, {})
            state_url = app_state.get("url", "")

            if state_url:
                filename = Path(urlparse(state_url).path).name
                if filename:
                    installer_path = downloads_dir / filename

                    if installer_path.exists():
                        logger.verbose(
                            "BUILD", f"Found installer from state URL: {installer_path}"
                        )
                        return installer_path
        except Exception as err:
            # Best effort only: a corrupt/unreadable state file should not
            # abort the search, the fallback strategies may still succeed.
            logger.warning("BUILD", f"Could not check state file: {err}")

    # Strategy 3: Fallback - Search for installer matching app name/id.
    # Build the match tokens once, skipping empty tokens: an empty string is
    # a substring of every filename, so a missing app id/name would otherwise
    # make the filter match every installer in the directory.
    app_name = app.get("name", "").lower()
    id_token = app_id.lower().replace("napt-", "")
    name_tokens = [word for word in app_name.split() if len(word) > 3]

    # MSI is checked before EXE; the first pattern with any match wins.
    for pattern in ["*.msi", "*.exe"]:
        matches = list(downloads_dir.glob(pattern))

        # Filter by app name/id if possible
        matching = [
            p
            for p in matches
            if (id_token and id_token in p.name.lower())
            or any(token in p.name.lower() for token in name_tokens)
        ]

        if matching:
            # Prefer the most recently modified candidate.
            installer_path = max(matching, key=lambda p: p.stat().st_mtime)
            logger.verbose("BUILD", f"Found installer matching app: {installer_path}")
            return installer_path

    # No installer found after trying all strategies
    raise PackagingError(
        f"Cannot locate installer file for {app_id} in {downloads_dir}. "
        f"Tried locating via recipe URL, state file URL, and filename matching, "
        f"but no matching installer found. Verify the installer file exists in {downloads_dir}."
    )
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def _create_build_directory(base_dir: Path, app_id: str, version: str) -> Path:
    """Create a fresh, versioned build directory.

    Layout produced:
        {base_dir}/{app_id}/{version}/packagefiles/

    Any pre-existing build for the same app/version is removed first so the
    build always starts from a clean slate. The packagefiles/ subdirectory
    holds the PSADT payload that will be packed into the .intunewin file;
    detection scripts are written as siblings of packagefiles/ so they are
    kept out of the package.

    Args:
        base_dir: Base builds directory.
        app_id: Application ID.
        version: Application version.

    Returns:
        Path to the packagefiles subdirectory where PSADT files will be
        copied (build_dir/packagefiles/).

    Raises:
        OSError: If directory creation fails.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    version_dir = base_dir / app_id / version
    packagefiles_dir = version_dir / "packagefiles"

    # Wipe any previous build of this exact version before re-creating it.
    if version_dir.exists():
        logger.verbose("BUILD", f"Build directory exists: {version_dir}")
        logger.verbose("BUILD", "Removing existing build...")
        shutil.rmtree(version_dir)

    packagefiles_dir.mkdir(parents=True, exist_ok=True)
    logger.verbose("BUILD", f"Created build directory: {packagefiles_dir}")

    return packagefiles_dir
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
def _copy_psadt_pristine(psadt_cache_dir: Path, build_dir: Path) -> None:
    """Copy the cached PSADT template into the build directory, unmodified.

    Mirrors the entire v4 template structure, including:
    - PSAppDeployToolkit/ (module)
    - Invoke-AppDeployToolkit.exe
    - Invoke-AppDeployToolkit.ps1 (template - will be overwritten)
    - Assets/, Config/, Strings/ (default configs)
    - Files/, SupportFiles/ (empty directories for user files)
    - PSAppDeployToolkit.Extensions/

    Args:
        psadt_cache_dir: Path to cached PSADT version directory (root
            of Template_v4 extraction).
        build_dir: Build directory (packagefiles subdirectory) where PSADT
            should be copied.

    Raises:
        PackagingError: If PSADT cache directory or required files don't exist.
        OSError: If copy operation fails.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    if not psadt_cache_dir.exists():
        raise PackagingError(f"PSADT cache directory not found: {psadt_cache_dir}")

    logger.verbose("BUILD", f"Copying PSADT template from cache: {psadt_cache_dir}")

    # Mirror every top-level entry of the template into the build directory.
    for entry in psadt_cache_dir.iterdir():
        destination = build_dir / entry.name
        if entry.is_dir():
            shutil.copytree(entry, destination)
            logger.verbose("BUILD", f" Copied directory: {entry.name}/")
        else:
            shutil.copy2(entry, destination)
            logger.verbose("BUILD", f" Copied file: {entry.name}")

    logger.verbose("BUILD", "[OK] PSADT template copied")
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
def _copy_installer(installer_file: Path, build_dir: Path) -> None:
    """Copy installer to the build's Files/ directory.

    Args:
        installer_file: Path to the installer file.
        build_dir: Build directory (packagefiles subdirectory).

    Raises:
        OSError: If copy operation fails.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    files_dir = build_dir / "Files"
    # The PSADT template normally ships a Files/ directory, but create it
    # defensively so the copy does not fail with an opaque FileNotFoundError
    # if a template variant omitted it.
    files_dir.mkdir(parents=True, exist_ok=True)
    dest = files_dir / installer_file.name

    logger.verbose("BUILD", f"Copying installer: {installer_file.name}")

    shutil.copy2(installer_file, dest)

    logger.verbose("BUILD", "[OK] Installer copied to Files/")
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
def _apply_branding(config: dict[str, Any], build_dir: Path) -> None:
    """Replace PSADT's default assets with a configured brand pack.

    Reads ``defaults.psadt.brand_pack`` from the merged configuration and,
    for each source->target mapping, copies the first brand file matching
    the source glob over the corresponding asset in the build directory
    (v4 structure: root Assets/). Silently no-ops when no brand pack is
    configured or its path does not exist.

    Args:
        config: Merged configuration with brand_pack settings.
        build_dir: Build directory (packagefiles subdirectory) containing
            PSAppDeployToolkit/.

    Raises:
        OSError: If file copy operation fails.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    brand_pack = config.get("defaults", {}).get("psadt", {}).get("brand_pack")

    if not brand_pack:
        logger.verbose("BUILD", "No brand pack configured, using PSADT defaults")
        return

    brand_path = Path(brand_pack.get("path", ""))
    mappings = brand_pack.get("mappings", [])

    if not brand_path.exists():
        logger.verbose(
            "BUILD", f"Brand pack path not found: {brand_path}, skipping branding"
        )
        return

    logger.verbose("BUILD", f"Applying branding from: {brand_path}")

    for mapping in mappings:
        source_pattern = mapping.get("source", "")
        target_path = mapping.get("target", "")

        # Skip malformed mapping entries rather than failing the build.
        if not (source_pattern and target_path):
            continue

        candidates = list(brand_path.glob(source_pattern))
        if not candidates:
            logger.verbose("BUILD", f"No files match pattern: {source_pattern}")
            continue

        # First glob match wins; the target keeps its configured name and
        # gains the source file's extension (appended, not substituted).
        source_file = candidates[0]
        target_with_ext = Path(str(build_dir / target_path) + source_file.suffix)

        target_with_ext.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(source_file, target_with_ext)
        logger.verbose("BUILD", f" {source_file.name} -> {target_with_ext.name}")

    logger.verbose("BUILD", "[OK] Branding applied")
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def _generate_detection_script(
    installer_file: Path,
    config: dict[str, Any],
    version: str,
    app_id: str,
    build_dir: Path,
) -> Path:
    """Generate detection script for Intune Win32 app.

    Extracts metadata from installer (MSI ProductName for MSIs,
    win32.installed_check.display_name for non-MSI installers), generates PowerShell
    detection script, and saves it as a sibling to the packagefiles directory
    so it is excluded from the .intunewin package.

    Args:
        installer_file: Path to the installer file.
        config: Recipe configuration.
        version: Extracted version string.
        app_id: Application ID.
        build_dir: Build directory (packagefiles subdirectory).

    Returns:
        Path to the generated detection script.

    Raises:
        PackagingError: If detection script generation fails.
        ConfigError: If required configuration is missing
            (win32.installed_check.display_name required for non-MSI installers),
            or if MSI ProductName/architecture extraction fails.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    app = config["app"]

    # Get installed_check configuration (merged defaults + app-specific)
    defaults_installed_check = (
        config.get("defaults", {}).get("win32", {}).get("installed_check", {})
    )
    app_installed_check = app.get("win32", {}).get("installed_check", {})
    # Merge: app overrides defaults (shallow merge at top level)
    installed_check_config = {**defaults_installed_check, **app_installed_check}

    # Merge nested detection config separately, because the shallow merge
    # above would otherwise replace the whole "detection" sub-dict.
    defaults_detection_nested = defaults_installed_check.get("detection", {})
    app_detection_nested = app_installed_check.get("detection", {})
    detection_nested_config = {**defaults_detection_nested, **app_detection_nested}

    # Determine AppName for detection
    app_name_for_detection = None
    installer_ext = installer_file.suffix.lower()
    override_msi_display_name = installed_check_config.get(
        "override_msi_display_name", False
    )

    # Warn (not fail) on config combinations that will be ignored, so the
    # recipe author learns about them without breaking the build.
    # Check if display_name is set for MSI without override flag
    if installer_ext == ".msi" and installed_check_config.get("display_name"):
        if not override_msi_display_name:
            logger.warning(
                "DETECTION",
                "win32.installed_check.display_name is set but will be ignored for "
                "MSI installers. MSI ProductName is used as the authoritative source "
                "for registry DisplayName. Set override_msi_display_name: true to use "
                "display_name instead.",
            )

    # Check if architecture is set for MSI installers (not allowed)
    if installer_ext == ".msi" and installed_check_config.get("architecture"):
        logger.warning(
            "DETECTION",
            "win32.installed_check.architecture is set but will be ignored for MSI "
            "installers. MSI Template is used as the authoritative source for "
            "architecture.",
        )

    # Check if override_msi_display_name is set for non-MSI installers
    if installer_ext != ".msi" and override_msi_display_name:
        logger.warning(
            "DETECTION",
            "win32.installed_check.override_msi_display_name is set but will be "
            "ignored for non-MSI installers. This flag only applies to MSI installers.",
        )

    # Determine architecture for detection
    expected_architecture: str = "any"  # Default for "any" mode

    if installer_ext == ".msi":
        if override_msi_display_name:
            # MSI with override: Use display_name instead of ProductName
            if not installed_check_config.get("display_name"):
                raise ConfigError(
                    "win32.installed_check.override_msi_display_name is true but "
                    "display_name is not set. Set display_name when using "
                    "override_msi_display_name."
                )
            app_name_for_detection = installed_check_config["display_name"]
            # Support ${discovered_version} template variable
            if "${discovered_version}" in app_name_for_detection:
                app_name_for_detection = app_name_for_detection.replace(
                    "${discovered_version}", version
                )
            logger.verbose(
                "DETECTION",
                f"Using display_name (override): {app_name_for_detection}",
            )
        else:
            # MSI: Use ProductName (required, no fallback - authoritative source)
            try:
                msi_metadata = extract_msi_metadata(installer_file)
                if not msi_metadata.product_name:
                    raise ConfigError(
                        "MSI ProductName property not found. Cannot generate detection "
                        "script. Ensure the MSI file is valid and contains ProductName "
                        "property."
                    )
                app_name_for_detection = msi_metadata.product_name
                logger.verbose(
                    "DETECTION",
                    f"Using MSI ProductName for detection: {app_name_for_detection}",
                )
            except ConfigError:
                # Re-raise ConfigError as-is (e.g., ProductName not found)
                raise
            except Exception as err:
                # Wrap other exceptions (extraction failures) as ConfigError
                raise ConfigError(
                    f"Failed to extract MSI ProductName. Cannot generate detection "
                    f"script. Error: {err}"
                ) from err

        # Auto-detect architecture from MSI Template (always, even with override)
        try:
            expected_architecture = extract_msi_architecture(installer_file)
            logger.verbose(
                "DETECTION",
                f"Auto-detected MSI architecture: {expected_architecture}",
            )
        except ConfigError:
            # Re-raise ConfigError as-is (e.g., unsupported platform)
            raise
        except Exception as err:
            # Wrap other exceptions (extraction failures) as ConfigError
            raise ConfigError(
                f"Failed to extract MSI architecture. Cannot generate detection script. "
                f"Error: {err}"
            ) from err
    elif installed_check_config.get("display_name"):
        # Non-MSI: Use explicit display_name (required)
        # Support ${discovered_version} template variable
        app_name_for_detection = installed_check_config["display_name"]
        if "${discovered_version}" in app_name_for_detection:
            app_name_for_detection = app_name_for_detection.replace(
                "${discovered_version}", version
            )
        logger.verbose(
            "DETECTION",
            f"Using win32.installed_check.display_name: {app_name_for_detection}",
        )

        # Non-MSI: architecture is required
        if installed_check_config.get("architecture"):
            expected_architecture = installed_check_config["architecture"]
            logger.verbose(
                "DETECTION",
                f"Using win32.installed_check.architecture: {expected_architecture}",
            )
        else:
            raise ConfigError(
                "win32.installed_check.architecture is required for non-MSI installers. "
                "Set app.win32.installed_check.architecture in recipe configuration. "
                "Allowed values: x86, x64, arm64, any"
            )
    else:
        # Non-MSI: display_name required
        raise ConfigError(
            "win32.installed_check.display_name is required for non-MSI installers. "
            "Set app.win32.installed_check.display_name in recipe configuration."
        )

    # Determine if wildcard matching is needed (glob-style characters in the
    # resolved app name switch the generated script to wildcard comparison).
    use_wildcard = "*" in app_name_for_detection or "?" in app_name_for_detection

    # Create DetectionConfig from merged configuration
    detection_config_obj = DetectionConfig(
        app_name=app_name_for_detection,
        version=version,
        log_format=installed_check_config.get("log_format", "cmtrace"),
        log_level=installed_check_config.get("log_level", "INFO"),
        log_rotation_mb=installed_check_config.get("log_rotation_mb", 3),
        exact_match=detection_nested_config.get("exact_match", False),
        app_id=app_id,
        is_msi_installer=(installer_ext == ".msi"),
        expected_architecture=expected_architecture,
        use_wildcard=use_wildcard,
    )

    # Sanitize AppName for filename
    sanitized_app_name = sanitize_filename(app_name_for_detection, app_id)
    sanitized_version = version.replace(
        " ", "-"
    )  # Versions shouldn't have spaces, but be safe

    # Build detection script filename: {AppName}_{Version}-Detection.ps1
    detection_filename = f"{sanitized_app_name}_{sanitized_version}-Detection.ps1"

    # Save as sibling to packagefiles/ (build_dir.parent is the version directory)
    # so the script is not swept into the .intunewin package.
    detection_script_path = build_dir.parent / detection_filename

    logger.verbose("DETECTION", f"Generating detection script: {detection_filename}")
    logger.verbose("DETECTION", f"AppName: {app_name_for_detection}")
    logger.verbose("DETECTION", f"Version: {version}")

    # Generate the script
    generate_detection_script(detection_config_obj, detection_script_path)

    return detection_script_path
|
|
618
|
+
|
|
619
|
+
|
|
620
|
+
def _substitute_discovered_version(display_name: str, version: str) -> str:
    """Replace the ``${discovered_version}`` placeholder with *version*.

    ``str.replace`` is a no-op when the placeholder is absent, so no
    containment check is needed.
    """
    return display_name.replace("${discovered_version}", version)


def _generate_requirements_script(
    installer_file: Path,
    config: dict[str, Any],
    version: str,
    app_id: str,
    build_dir: Path,
) -> Path:
    """Generate requirements script for Intune Win32 app (Update entry).

    Extracts metadata from installer (MSI ProductName for MSIs,
    win32.installed_check.display_name for non-MSI installers), generates
    PowerShell requirements script, and saves it as a sibling to the
    packagefiles directory.

    The requirements script determines if an older version is installed,
    making the Update entry applicable only to devices that need updating.

    Args:
        installer_file: Path to the installer file.
        config: Recipe configuration.
        version: Extracted version string (target version).
        app_id: Application ID.
        build_dir: Build directory (packagefiles subdirectory).

    Returns:
        Path to the generated requirements script.

    Raises:
        PackagingError: If requirements script generation fails.
        ConfigError: If required configuration is missing
            (win32.installed_check.display_name required for non-MSI installers).
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    app = config["app"]

    # Get installed_check configuration (merged defaults + app-specific;
    # app-level keys win on conflict).
    defaults_installed_check = (
        config.get("defaults", {}).get("win32", {}).get("installed_check", {})
    )
    app_installed_check = app.get("win32", {}).get("installed_check", {})
    installed_check_config = {**defaults_installed_check, **app_installed_check}

    # Determine AppName for requirements
    app_name_for_requirements = None
    installer_ext = installer_file.suffix.lower()
    override_msi_display_name = installed_check_config.get(
        "override_msi_display_name", False
    )

    # Determine architecture for requirements
    expected_architecture: str = "any"  # Default for "any" mode

    if installer_ext == ".msi":
        if override_msi_display_name:
            # MSI with override: Use display_name instead of ProductName
            if not installed_check_config.get("display_name"):
                raise ConfigError(
                    "win32.installed_check.override_msi_display_name is true but "
                    "display_name is not set. Set display_name when using "
                    "override_msi_display_name."
                )
            # Support ${discovered_version} template variable
            app_name_for_requirements = _substitute_discovered_version(
                installed_check_config["display_name"], version
            )
            logger.verbose(
                "REQUIREMENTS",
                f"Using display_name (override): {app_name_for_requirements}",
            )
        else:
            # MSI: Use ProductName (required, no fallback - authoritative source)
            try:
                msi_metadata = extract_msi_metadata(installer_file)
                if not msi_metadata.product_name:
                    raise ConfigError(
                        "MSI ProductName property not found. Cannot generate "
                        "requirements script. Ensure the MSI file is valid and "
                        "contains ProductName property."
                    )
                app_name_for_requirements = msi_metadata.product_name
                logger.verbose(
                    "REQUIREMENTS",
                    f"Using MSI ProductName for requirements: {app_name_for_requirements}",
                )
            except ConfigError:
                raise
            except Exception as err:
                raise ConfigError(
                    f"Failed to extract MSI ProductName. Cannot generate requirements "
                    f"script. Error: {err}"
                ) from err

        # Auto-detect architecture from MSI Template (always, even with override)
        try:
            expected_architecture = extract_msi_architecture(installer_file)
            logger.verbose(
                "REQUIREMENTS",
                f"Auto-detected MSI architecture: {expected_architecture}",
            )
        except ConfigError:
            # Re-raise ConfigError as-is (e.g., unsupported platform)
            raise
        except Exception as err:
            # Wrap other exceptions (extraction failures) as ConfigError
            raise ConfigError(
                f"Failed to extract MSI architecture. Cannot generate requirements "
                f"script. Error: {err}"
            ) from err
    elif installed_check_config.get("display_name"):
        # Non-MSI: Use explicit display_name (required)
        # Support ${discovered_version} template variable
        app_name_for_requirements = _substitute_discovered_version(
            installed_check_config["display_name"], version
        )
        logger.verbose(
            "REQUIREMENTS",
            f"Using win32.installed_check.display_name: {app_name_for_requirements}",
        )

        # Non-MSI: architecture is required
        if installed_check_config.get("architecture"):
            expected_architecture = installed_check_config["architecture"]
            logger.verbose(
                "REQUIREMENTS",
                f"Using win32.installed_check.architecture: {expected_architecture}",
            )
        else:
            raise ConfigError(
                "win32.installed_check.architecture is required for non-MSI installers. "
                "Set app.win32.installed_check.architecture in recipe configuration. "
                "Allowed values: x86, x64, arm64, any"
            )
    else:
        # Non-MSI: display_name required
        raise ConfigError(
            "win32.installed_check.display_name is required for non-MSI installers. "
            "Set app.win32.installed_check.display_name in recipe configuration."
        )

    # Determine if wildcard matching is needed
    use_wildcard = "*" in app_name_for_requirements or "?" in app_name_for_requirements

    # Create RequirementsConfig from merged configuration
    requirements_config_obj = RequirementsConfig(
        app_name=app_name_for_requirements,
        version=version,
        log_format=installed_check_config.get("log_format", "cmtrace"),
        log_level=installed_check_config.get("log_level", "INFO"),
        log_rotation_mb=installed_check_config.get("log_rotation_mb", 3),
        app_id=app_id,
        is_msi_installer=(installer_ext == ".msi"),
        expected_architecture=expected_architecture,
        use_wildcard=use_wildcard,
    )

    # Sanitize AppName for filename
    sanitized_app_name = sanitize_filename(app_name_for_requirements, app_id)
    sanitized_version = version.replace(" ", "-")

    # Build requirements script filename: {AppName}_{Version}-Requirements.ps1
    requirements_filename = f"{sanitized_app_name}_{sanitized_version}-Requirements.ps1"

    # Save as sibling to packagefiles/ (build_dir.parent is the version directory)
    requirements_script_path = build_dir.parent / requirements_filename

    logger.verbose(
        "REQUIREMENTS", f"Generating requirements script: {requirements_filename}"
    )
    logger.verbose("REQUIREMENTS", f"AppName: {app_name_for_requirements}")
    logger.verbose("REQUIREMENTS", f"Version: {version}")

    # Generate the script
    generate_requirements_script(requirements_config_obj, requirements_script_path)

    return requirements_script_path
802
|
+
|
|
803
|
+
def _write_build_manifest(
    build_dir: Path,
    app_id: str,
    app_name: str,
    version: str,
    build_types: str,
    detection_script_path: Path | None,
    requirements_script_path: Path | None,
) -> Path:
    """Write build manifest JSON to the build output directory.

    The manifest provides metadata about what was built, enabling downstream
    tools (like napt upload) to understand the build output without needing
    to re-derive paths or configuration.

    Args:
        build_dir: Build directory (packagefiles subdirectory).
        app_id: Application ID.
        app_name: Application display name.
        version: Application version.
        build_types: The build_types setting used ("both", "app_only", "update_only").
        detection_script_path: Path to detection script, or None if not generated.
        requirements_script_path: Path to requirements script, or None if not generated.

    Returns:
        Path to the generated manifest file.

    Raises:
        OSError: If the manifest file cannot be written.

    """
    from napt.logging import get_global_logger

    logger = get_global_logger()

    # Core metadata describing this build.
    manifest: dict[str, str] = {
        "app_id": app_id,
        "app_name": app_name,
        "version": version,
        "win32_build_types": build_types,
    }

    # Record script filenames only (relative to the version directory)
    # so the manifest stays portable across machines.
    optional_scripts = (
        ("detection_script_path", detection_script_path),
        ("requirements_script_path", requirements_script_path),
    )
    for key, script in optional_scripts:
        if script:
            manifest[key] = script.name

    # The manifest lives in the version directory, beside packagefiles/.
    manifest_path = build_dir.parent / "build-manifest.json"

    try:
        manifest_path.write_text(json.dumps(manifest, indent=2), encoding="utf-8")
    except OSError as err:
        raise OSError(
            f"Failed to write build manifest to {manifest_path}: {err}"
        ) from err
    logger.verbose("BUILD", f"Build manifest written to: {manifest_path}")

    return manifest_path
867
|
+
|
|
868
|
+
def build_package(
    recipe_path: Path,
    downloads_dir: Path | None = None,
    output_dir: Path | None = None,
) -> BuildResult:
    """Build a PSADT package from a recipe and downloaded installer.

    This is the main entry point for the build process. It:

    1. Loads the recipe configuration
    2. Finds the downloaded installer
    3. Extracts version from installer (filesystem is truth)
    4. Gets/downloads PSADT release
    5. Creates build directory structure
    6. Copies PSADT files (pristine)
    7. Generates Invoke-AppDeployToolkit.ps1 from template
    8. Copies installer to Files/
    9. Applies custom branding
    10. Generates detection script (always; used by App entry and by Update entry)
    11. Generates requirements script (when build_types is "both" or "update_only")

    Args:
        recipe_path: Path to the recipe YAML file.
        downloads_dir: Directory containing the downloaded
            installer. Default: Path("downloads")
        output_dir: Base directory for build output.
            Default: From config or Path("builds")

    Returns:
        Build result containing app metadata, build paths, PSADT version, and
        generated script paths.

    Raises:
        FileNotFoundError: If recipe or installer doesn't exist.
        PackagingError: If build process fails or script generation fails
            (when fail_on_error=true in win32.installed_check config).
        ConfigError: If required configuration is missing.

    Example:
        Basic build:
        ```python
        result = build_package(Path("recipes/Google/chrome.yaml"))
        print(result.build_dir)  # builds/napt-chrome/141.0.7390.123
        print(result.build_types)  # "both"
        ```

        Custom output directory:
        ```python
        result = build_package(
            Path("recipes/Google/chrome.yaml"),
            output_dir=Path("custom/builds")
        )
        ```

    Note:
        Requires installer to be downloaded first (run 'napt discover').
        Version extracted from installer file, not state cache.
        Overwrites existing build directory if it exists.
        PSADT files are copied pristine from cache.
        Invoke-AppDeployToolkit.ps1 is generated (not copied).
        Scripts are generated as siblings to the packagefiles directory
        (not included in .intunewin package - must be uploaded separately to Intune).
        Script generation can be configured as non-fatal via
        win32.installed_check.fail_on_error setting in recipe configuration.
        Detection script is always generated.
        The build_types setting controls requirements script only: "both" (default)
        generates detection and requirements, "app_only" generates only detection,
        "update_only" generates detection and requirements.
    """
    from napt.logging import get_global_logger

    logger = get_global_logger()
    # Load configuration
    logger.step(1, 8, "Loading configuration...")
    config = load_effective_config(recipe_path)

    app = config["app"]
    app_id = app.get("id", "unknown-app")
    app_name = app.get("name", "Unknown App")

    # Set defaults
    if downloads_dir is None:
        downloads_dir = Path("downloads")

    if output_dir is None:
        output_dir = Path(
            config.get("defaults", {}).get("build", {}).get("output_dir", "builds")
        )

    # Find installer file
    logger.step(2, 8, "Finding installer...")
    # NOTE(review): state file path is hardcoded here rather than read from
    # config — confirm this matches the rest of the CLI's state handling.
    state_file = Path("state/versions.json")  # Default state file location
    installer_file = _find_installer_file(downloads_dir, config, state_file)

    # Extract version from installer or state (filesystem + state are truth)
    logger.step(3, 8, "Determining version...")
    version = _get_installer_version(installer_file, config, state_file)

    logger.verbose("BUILD", f"Building {app_name} v{version}")

    # Get PSADT release
    logger.step(4, 8, "Getting PSADT release...")
    psadt_config = config.get("defaults", {}).get("psadt", {})
    release_spec = psadt_config.get("release", "latest")
    cache_dir = Path(psadt_config.get("cache_dir", "cache/psadt"))

    psadt_cache_dir = get_psadt_release(release_spec, cache_dir)
    psadt_version = psadt_cache_dir.name  # Directory name is the version

    logger.verbose("BUILD", f"Using PSADT {psadt_version}")

    # Create build directory
    logger.step(5, 8, "Creating build structure...")
    build_dir = _create_build_directory(output_dir, app_id, version)

    # Copy PSADT files (pristine)
    _copy_psadt_pristine(psadt_cache_dir, build_dir)

    # Generate Invoke-AppDeployToolkit.ps1
    # (function-scope import — presumably to avoid a module import cycle; confirm)
    from .template import generate_invoke_script

    template_path = psadt_cache_dir / "Invoke-AppDeployToolkit.ps1"
    invoke_script = generate_invoke_script(
        template_path, config, version, psadt_version
    )

    # Write generated script
    script_dest = build_dir / "Invoke-AppDeployToolkit.ps1"
    script_dest.write_text(invoke_script, encoding="utf-8")
    logger.verbose("BUILD", "[OK] Generated Invoke-AppDeployToolkit.ps1")

    # Copy installer
    _copy_installer(installer_file, build_dir)

    # Apply branding
    logger.step(6, 8, "Applying branding...")
    _apply_branding(config, build_dir)

    # Get build_types configuration (app-level setting wins over defaults)
    defaults_win32 = config.get("defaults", {}).get("win32", {})
    app_win32 = app.get("win32", {})
    build_types = app_win32.get(
        "build_types", defaults_win32.get("build_types", "both")
    )

    # Get fail_on_error from win32.installed_check config
    # (defaults to True: script-generation failures abort the build)
    defaults_ic = defaults_win32.get("installed_check", {})
    app_ic = app_win32.get("installed_check", {})
    fail_on_error = app_ic.get("fail_on_error", defaults_ic.get("fail_on_error", True))

    detection_script_path: Path | None = None
    requirements_script_path: Path | None = None

    # Generate detection script (always; needed for App and Update entries)
    logger.step(7, 8, "Generating detection script...")
    try:
        detection_script_path = _generate_detection_script(
            installer_file, config, version, app_id, build_dir
        )
        logger.verbose("BUILD", "[OK] Detection script generated")
    except Exception as err:
        if fail_on_error:
            raise PackagingError(
                f"Detection script generation failed (fail_on_error=true): {err}"
            ) from err
        else:
            # Non-fatal: warn and continue; manifest will simply omit the script.
            logger.warning(
                "BUILD",
                f"Detection script generation failed (non-fatal): {err}",
            )
            logger.verbose("BUILD", "Continuing build without detection script...")

    # Generate requirements script (for "both" or "update_only")
    if build_types in ("both", "update_only"):
        logger.step(8, 8, "Generating requirements script...")
        try:
            requirements_script_path = _generate_requirements_script(
                installer_file, config, version, app_id, build_dir
            )
            logger.verbose("BUILD", "[OK] Requirements script generated")
        except Exception as err:
            if fail_on_error:
                raise PackagingError(
                    f"Requirements script generation failed (fail_on_error=true): {err}"
                ) from err
            else:
                # Non-fatal: warn and continue without the requirements script.
                logger.warning(
                    "BUILD",
                    f"Requirements script generation failed (non-fatal): {err}",
                )
                logger.verbose(
                    "BUILD", "Continuing build without requirements script..."
                )
    else:
        logger.step(8, 8, "Skipping requirements script (build_types=app_only)...")

    # Write build manifest (sibling to packagefiles/; consumed by napt upload)
    _write_build_manifest(
        build_dir=build_dir,
        app_id=app_id,
        app_name=app_name,
        version=version,
        build_types=build_types,
        detection_script_path=detection_script_path,
        requirements_script_path=requirements_script_path,
    )

    logger.verbose("BUILD", f"[OK] Build complete: {build_dir}")

    return BuildResult(
        app_id=app_id,
        app_name=app_name,
        version=version,
        build_dir=build_dir,
        psadt_version=psadt_version,
        status="success",
        build_types=build_types,
        detection_script_path=detection_script_path,
        requirements_script_path=requirements_script_path,
    )