autopkg-wrapper 2024.2.4-py3-none-any.whl → 2026.2.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autopkg_wrapper/__init__.py +0 -1
- autopkg_wrapper/autopkg_wrapper.py +254 -157
- autopkg_wrapper/models/recipe.py +139 -0
- autopkg_wrapper/notifier/__init__.py +0 -1
- autopkg_wrapper/notifier/slack.py +2 -2
- autopkg_wrapper/utils/__init__.py +0 -1
- autopkg_wrapper/utils/args.py +151 -15
- autopkg_wrapper/utils/git_functions.py +73 -7
- autopkg_wrapper/utils/recipe_batching.py +41 -0
- autopkg_wrapper/utils/recipe_ordering.py +149 -0
- autopkg_wrapper/utils/report_processor.py +674 -0
- autopkg_wrapper-2026.2.6.dist-info/METADATA +107 -0
- autopkg_wrapper-2026.2.6.dist-info/RECORD +17 -0
- {autopkg_wrapper-2024.2.4.dist-info → autopkg_wrapper-2026.2.6.dist-info}/WHEEL +1 -1
- autopkg_wrapper-2026.2.6.dist-info/entry_points.txt +2 -0
- autopkg_wrapper-2024.2.4.dist-info/METADATA +0 -57
- autopkg_wrapper-2024.2.4.dist-info/RECORD +0 -13
- autopkg_wrapper-2024.2.4.dist-info/entry_points.txt +0 -4
- {autopkg_wrapper-2024.2.4.dist-info → autopkg_wrapper-2026.2.6.dist-info/licenses}/LICENSE +0 -0
autopkg_wrapper/__init__.py
CHANGED
@@ -1 +0,0 @@
-__version__ = "2024.2.4"
autopkg_wrapper/autopkg_wrapper.py
CHANGED
@@ -1,127 +1,49 @@
 #!/usr/bin/env python3
+import json
 import logging
 import plistlib
-import subprocess
 import sys
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from pathlib import Path
 
 import autopkg_wrapper.utils.git_functions as git
+from autopkg_wrapper.models.recipe import Recipe
 from autopkg_wrapper.notifier import slack
 from autopkg_wrapper.utils.args import setup_args
 from autopkg_wrapper.utils.logging import setup_logger
+from autopkg_wrapper.utils.recipe_batching import build_recipe_batches, recipe_type_for
+from autopkg_wrapper.utils.recipe_ordering import order_recipe_list
+from autopkg_wrapper.utils.report_processor import process_reports
 
 
-
-
-
-        self.error = False
-        self.results = {}
-        self.updated = False
-        self.verified = None
-        self.pr_url = None
-
-        self._keys = None
-        self._has_run = False
-
-    @property
-    def name(self):
-        name = self.filename.split(".")[0]
+def get_override_repo_info(args):
+    if args.overrides_repo_path:
+        recipe_override_dirs = args.overrides_repo_path
 
-
+    else:
+        logging.debug("Trying to determine overrides dir from default paths")
 
-
-
-        cmd = " ".join(cmd)
-        logging.debug(f"cmd: {str(cmd)}")
+        if args.autopkg_prefs:
+            autopkg_prefs_path = Path(args.autopkg_prefs).resolve()
 
-
-
-
-
-
-        if p_status == 0:
-            self.verified = True
+            if autopkg_prefs_path.suffix == ".json":
+                with open(autopkg_prefs_path, "r") as f:
+                    autopkg_prefs = json.load(f)
+            elif autopkg_prefs_path.suffix == ".plist":
+                autopkg_prefs = plistlib.loads(autopkg_prefs_path.read_bytes())
         else:
-
-
-
-            return self.verified
-
-    def update_trust_info(self):
-        cmd = ["/usr/local/bin/autopkg", "update-trust-info", self.filename]
-        cmd = " ".join(cmd)
-        logging.debug(f"cmd: {str(cmd)}")
-
-        # Fail loudly if this exits 0
-        try:
-            subprocess.check_call(cmd, shell=True)
-        except subprocess.CalledProcessError as e:
-            logging.error(e.stderr)
-            raise e
-
-    def _parse_report(self, report):
-        with open(report, "rb") as f:
-            report_data = plistlib.load(f)
-
-        failed_items = report_data.get("failures", [])
-        imported_items = []
-        if report_data["summary_results"]:
-            # This means something happened
-            munki_results = report_data["summary_results"].get(
-                "munki_importer_summary_result", {}
+            user_home = Path.home()
+            autopkg_prefs_path = (
+                user_home / "Library/Preferences/com.github.autopkg.plist"
             )
-            imported_items.extend(munki_results.get("data_rows", []))
-
-        return {"imported": imported_items, "failed": failed_items}
-
-    def run(self):
-        if self.verified is False:
-            self.error = True
-            self.results["failed"] = True
-            self.results["imported"] = ""
-        else:
-            report = Path("/tmp/autopkg.plist")
-            report.touch(exist_ok=True)
-
-            try:
-                cmd = [
-                    "/usr/local/bin/autopkg",
-                    "run",
-                    self.filename,
-                    "-vv",
-                    "--post",
-                    "io.github.hjuutilainen.VirusTotalAnalyzer/VirusTotalAnalyzer",
-                    "--report-plist",
-                    str(report),
-                ]
-                cmd = " ".join(cmd)
-                logging.debug(f"cmd: {str(cmd)}")
-
-                subprocess.check_call(cmd, shell=True)
-
-            except subprocess.CalledProcessError:
-                self.error = True
-
-            self._has_run = True
-            self.results = self._parse_report(report)
-            if not self.results["failed"] and not self.error:
-                self.updated = True
-
-            return self.results
-
-
-def get_override_repo_info(args):
-    if args.autopkg_overrides_repo_path:
-        recipe_override_dirs = args.autopkg_overrides_repo_path
-
-    else:
-        logging.debug("Trying to determine overrides dir from default paths")
-        user_home = Path.home()
-        autopkg_prefs_path = user_home / "Library/Preferences/com.github.autopkg.plist"
 
-
-
+    if autopkg_prefs_path.is_file():
+        autopkg_prefs = plistlib.loads(
+            autopkg_prefs_path.resolve().read_bytes()
+        )
 
+    logging.debug(f"autopkg prefs path: {autopkg_prefs_path}")
+    logging.debug(f"autopkg prefs: {autopkg_prefs}")
     recipe_override_dirs = Path(autopkg_prefs["RECIPE_OVERRIDE_DIRS"]).resolve()
 
     if Path(recipe_override_dirs / ".git").is_dir():
@@ -132,8 +54,10 @@ def get_override_repo_info(args):
     logging.debug(f"Override Repo Path: {override_repo_path}")
 
     override_repo_git_work_tree = f"--work-tree={override_repo_path}"
-    override_repo_git_git_dir = f"--git-dir={override_repo_path /
-    override_repo_url, override_repo_remote_ref = git.get_repo_info(
+    override_repo_git_git_dir = f"--git-dir={override_repo_path / '.git'}"
+    override_repo_url, override_repo_remote_ref = git.get_repo_info(
+        override_repo_git_git_dir
+    )
 
     git_info = {
         "override_repo_path": override_repo_path,
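The reworked get_override_repo_info resolves the overrides directory either from an explicit --overrides-repo-path argument or by reading RECIPE_OVERRIDE_DIRS out of the autopkg preferences (JSON or plist). A minimal standalone sketch of that lookup, assuming a hypothetical resolve_override_dir helper; the default preferences path and key name are taken from the diff above:

import json
import plistlib
from pathlib import Path

def resolve_override_dir(prefs_path: Path) -> Path:
    # Hypothetical helper mirroring the branch in the diff: JSON prefs vs. plist prefs.
    if prefs_path.suffix == ".json":
        prefs = json.loads(prefs_path.read_text())
    else:
        prefs = plistlib.loads(prefs_path.read_bytes())
    return Path(prefs["RECIPE_OVERRIDE_DIRS"]).resolve()

default_prefs = Path.home() / "Library/Preferences/com.github.autopkg.plist"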
@@ -150,62 +74,153 @@ def get_override_repo_info(args):
     return git_info
 
 
-def update_recipe_repo(recipe, git_info):
-
-
-
-
+def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):
+    logging.debug(f"recipe.verified: {recipe.verified}")
+    logging.debug(f"disable_recipe_trust_check: {disable_recipe_trust_check}")
+
+    match recipe.verified:
+        case True:
+            logging.debug("Not updating repo as recipe has been verified")
+            return
+        case False | None if disable_recipe_trust_check:
+            logging.debug("Not updating repo as recipe verification has been disabled")
+            return
+        case False:
+            logging.debug("Updating repo as recipe verification failed")
+            current_branch = git.get_current_branch(git_info)
+
+            if args.disable_git_commands:
+                logging.info(
+                    "Not runing git commands as --disable-git-commands has been set"
+                )
+                return
+
+            if current_branch != git_info["override_trust_branch"]:
+                logging.debug(
+                    f"override_trust_branch: {git_info['override_trust_branch']}"
+                )
+                git.create_branch(git_info)
+
+            git.stage_recipe(git_info)
+            git.commit_recipe(
+                git_info, message=f"Updating Trust Info for {recipe.name}"
+            )
+            git.pull_branch(git_info)
+            git.push_branch(git_info)
 
-
-        git.create_branch(git_info)
+    return
 
-    if recipe.verified is False:
-        git.stage_recipe(git_info)
-        git.commit_recipe(git_info, message=f"Updating Trust Info for {recipe.name}")
-        git.pull_branch(git_info)
-        git.push_branch(git_info)
 
+def parse_recipe_list(recipes, recipe_file, post_processors, args):
+    """Parse recipe inputs into a common list of recipe names.
 
-
-
+    The arguments assume that `recipes` and `recipe_file` are mutually exclusive.
+    If `args.recipe_processing_order` is provided, the list is re-ordered before
+    creating `Recipe` objects.
+    """
     recipe_list = None
 
-    logging.
-    logging.
-
-    if
-
-
-
-
-
-
-
-
+    logging.info(f"Recipes: {recipes}") if recipes else None
+    logging.info(f"Recipe List: {recipe_file}") if recipe_file else None
+
+    if recipe_file:
+        if recipe_file.suffix == ".json":
+            with open(recipe_file, "r") as f:
+                recipe_list = json.load(f)
+        elif recipe_file.suffix in {".yaml", ".yml"}:
+            from ruamel.yaml import YAML
+
+            yaml = YAML(typ="safe")
+            with open(recipe_file, "r", encoding="utf-8") as f:
+                recipe_list = yaml.load(f)
+        elif recipe_file.suffix == ".txt":
+            with open(recipe_file, "r") as f:
+                recipe_list = f.read().splitlines()
+    if recipes:
+        if isinstance(recipes, list):
+            recipe_list = recipes
+        elif isinstance(recipes, str):
+            if recipes.find(",") != -1:
+                # Assuming recipes separated by commas
+                recipe_list = [
+                    recipe.strip() for recipe in recipes.split(",") if recipe
+                ]
+            else:
+                # Assuming recipes separated by space
+                recipe_list = [
+                    recipe.strip() for recipe in recipes.split(" ") if recipe
+                ]
 
     if recipe_list is None:
         logging.error(
-            """Please provide
-            --recipes
-            --recipe-
-            Comma separated list in the
+            """Please provide recipes to run via the following methods:
+            --recipes recipe_one.download recipe_two.download
+            --recipe-file path/to/recipe_list.json
+            Comma separated list in the AW_RECIPES env variable"""
         )
         sys.exit(1)
 
-
+    if args.recipe_processing_order:
+        recipe_list = order_recipe_list(
+            recipe_list=recipe_list, order=args.recipe_processing_order
+        )
 
-
+    logging.info(f"Processing {len(recipe_list)} recipes.")
+    recipe_map = [Recipe(name, post_processors=post_processors) for name in recipe_list]
 
+    return recipe_map
 
-def process_recipe(recipe, override_trust):
-    if override_trust:
-        recipe.verify_trust_info()
-        logging.debug(f"Recipe Verification: {recipe.verified}")
 
-
-
-
-
+def parse_post_processors(post_processors):
+    """Parsing list of post_processors"""
+    logging.debug("Parsing post processors")
+
+    post_processors_list = None
+
+    match post_processors:
+        case None:
+            logging.debug("No post processors defined")
+        case []:
+            logging.debug("Found an empty list for post processors")
+        case list():
+            post_processors_list = post_processors
+        case str() if post_processors.find(",") != -1:
+            post_processors_list = [
+                post_processor.strip()
+                for post_processor in post_processors.split(",")
+                if post_processor.strip()
+            ]
+        case str():
+            post_processors_list = [
+                post_processor.strip()
+                for post_processor in post_processors.split(" ")
+                if post_processor.strip()
+            ]
+
+    logging.info(
+        f"Post Processors List: {post_processors_list}"
+    ) if post_processors_list else None
+
+    return post_processors_list
+
+
+def process_recipe(recipe, disable_recipe_trust_check, args):
+    if disable_recipe_trust_check:
+        logging.debug("Setting Recipe verification to None")
+        recipe.verified = None
+    else:
+        logging.debug("Checking Recipe verification")
+        recipe.verify_trust_info(args)
+
+    match recipe.verified:
+        case False | None if disable_recipe_trust_check:
+            logging.debug("Running Recipe without verification")
+            recipe.run(args)
+        case True:
+            logging.debug("Running Recipe after successful verification")
+            recipe.run(args)
+        case False:
+            recipe.update_trust_info(args)
 
     return recipe
 
@@ -217,16 +232,98 @@ def main():
 
     override_repo_info = get_override_repo_info(args)
 
-
-
-
-
-
-
-
+    post_processors_list = parse_post_processors(post_processors=args.post_processors)
+    recipe_list = parse_recipe_list(
+        recipes=args.recipes,
+        recipe_file=args.recipe_file,
+        post_processors=post_processors_list,
+        args=args,
+    )
+
+    failed_recipes = []
+
+    # Run recipes concurrently using a thread pool to parallelize subprocess calls
+    max_workers = max(1, int(getattr(args, "concurrency", 1)))
+    logging.info(f"Running recipes with concurrency={max_workers}")
+
+    def run_one(r: Recipe):
+        logging.info(f"Processing Recipe: {r.name}")
+        process_recipe(
+            recipe=r,
+            disable_recipe_trust_check=args.disable_recipe_trust_check,
+            args=args,
+        )
+        # Git updates and notifications are applied serially after all recipes finish
+        return r
 
-
+    if args.recipe_processing_order:
+        batches = build_recipe_batches(
+            recipe_list=recipe_list,
+            recipe_processing_order=args.recipe_processing_order,
+        )
+        for batch in batches:
+            batch_type = recipe_type_for(batch[0]) if batch else ""
+            logging.info(
+                f"Running {len(batch)} recipes for type={batch_type or 'unknown'}"
+            )
+            with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                futures = [executor.submit(run_one, r) for r in batch]
+                for fut in as_completed(futures):
+                    r = fut.result()
+                    if r.error or r.results.get("failed"):
+                        failed_recipes.append(r)
+    else:
+        with ThreadPoolExecutor(max_workers=max_workers) as executor:
+            futures = [executor.submit(run_one, r) for r in recipe_list]
+            for fut in as_completed(futures):
+                r = fut.result()
+                if r.error or r.results.get("failed"):
+                    failed_recipes.append(r)
+
+    # Apply git updates serially to avoid branch/commit conflicts when concurrency > 1
+    for r in recipe_list:
+        update_recipe_repo(
+            git_info=override_repo_info,
+            recipe=r,
+            disable_recipe_trust_check=args.disable_recipe_trust_check,
+            args=args,
+        )
 
+    # Send notifications serially to simplify rate limiting and ordering
+    if args.slack_token:
+        for r in recipe_list:
+            slack.send_notification(recipe=r, token=args.slack_token)
+
+    # Optionally open a PR for updated trust information
+    if args.create_pr and recipe_list:
+        # Choose a representative recipe for the PR title/body
+        rep_recipe = next(
+            (r for r in recipe_list if r.updated is True or r.verified is False),
+            recipe_list[0],
+        )
+        pr_url = git.create_pull_request(git_info=override_repo_info, recipe=rep_recipe)
+        logging.info(f"Created Pull Request for trust info updates: {pr_url}")
 
-
-
+    # Create GitHub issue for failed recipes
+    if args.create_issues and failed_recipes and args.github_token:
+        issue_url = git.create_issue_for_failed_recipes(
+            git_info=override_repo_info, failed_recipes=failed_recipes
+        )
+        logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
+
+    # Optionally process reports after running recipes
+    if getattr(args, "process_reports", False):
+        rc = process_reports(
+            zip_file=getattr(args, "reports_zip", None),
+            extract_dir=getattr(
+                args, "reports_extract_dir", "autopkg_reports_summary/reports"
+            ),
+            reports_dir=(getattr(args, "reports_dir", None) or "/private/tmp/autopkg"),
+            environment="",
+            run_date=getattr(args, "reports_run_date", ""),
+            out_dir=getattr(args, "reports_out_dir", "autopkg_reports_summary/summary"),
+            debug=bool(getattr(args, "debug", False)),
+            strict=bool(getattr(args, "reports_strict", False)),
+        )
+        if rc:
+            sys.exit(rc)
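In the new main(), recipes fan out to a ThreadPoolExecutor (optionally in per-type batches built by build_recipe_batches), failures are collected from the completed futures, and only then are git updates, notifications, PRs, and issues applied serially. A rough, self-contained sketch of that pattern; recipe_type, batch_by_type, and run_recipe below are hypothetical stand-ins, not the package's own helpers:

from concurrent.futures import ThreadPoolExecutor, as_completed
from itertools import groupby

def recipe_type(name):
    # Hypothetical stand-in for recipe_type_for(): the suffix after the last dot.
    return name.rsplit(".", 1)[-1]

def batch_by_type(names):
    # Hypothetical stand-in for build_recipe_batches(): one batch per recipe type.
    keyed = sorted(names, key=recipe_type)
    return [list(group) for _, group in groupby(keyed, key=recipe_type)]

def run_recipe(name):
    # Hypothetical stand-in for process_recipe()/Recipe.run(); returns (name, failed).
    return name, False

failed = []
for batch in batch_by_type(["Firefox.download", "Chrome.download", "Firefox.munki"]):
    with ThreadPoolExecutor(max_workers=4) as executor:
        futures = [executor.submit(run_recipe, name) for name in batch]
        for future in as_completed(futures):
            name, has_failed = future.result()
            if has_failed:
                failed.append(name)
# As in the diff, git commits, Slack notifications, PR creation, and issue filing
# would then run serially over the collected results.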
autopkg_wrapper/models/recipe.py
ADDED
@@ -0,0 +1,139 @@
+from __future__ import annotations
+
+import logging
+import plistlib
+import subprocess
+from datetime import datetime
+from itertools import chain
+from pathlib import Path
+
+
+class Recipe(object):
+    def __init__(self, name: str, post_processors: list = None):
+        self.filename = name
+        self.error = False
+        self.results = {}
+        self.updated = False
+        self.verified = None
+        self.pr_url = None
+        self.post_processors = post_processors
+
+        self._keys = None
+        self._has_run = False
+
+    @property
+    def name(self):
+        name = self.filename.split(".")[0]
+
+        return name
+
+    def verify_trust_info(self, args):
+        verbose_output = ["-vvvv"] if args.debug else []
+        prefs_file = (
+            ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
+        )
+        autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+        cmd = (
+            [autopkg_bin, "verify-trust-info", self.filename]
+            + verbose_output
+            + prefs_file
+        )
+        logging.debug(f"cmd: {cmd}")
+
+        result = subprocess.run(cmd, capture_output=True, text=True)
+        if result.returncode == 0:
+            self.verified = True
+        else:
+            self.results["message"] = (result.stderr or "").strip()
+            self.verified = False
+        return self.verified
+
+    def update_trust_info(self, args):
+        prefs_file = (
+            ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
+        )
+        autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+        cmd = [autopkg_bin, "update-trust-info", self.filename] + prefs_file
+        logging.debug(f"cmd: {cmd}")
+
+        # Fail loudly if this exits 0
+        try:
+            subprocess.check_call(cmd)
+        except subprocess.CalledProcessError as e:
+            logging.error(str(e))
+            raise e
+
+    def _parse_report(self, report):
+        with open(report, "rb") as f:
+            report_data = plistlib.load(f)
+
+        failed_items = report_data.get("failures", [])
+        imported_items = []
+        if report_data["summary_results"]:
+            # This means something happened
+            munki_results = report_data["summary_results"].get(
+                "munki_importer_summary_result", {}
+            )
+            imported_items.extend(munki_results.get("data_rows", []))
+
+        return {"imported": imported_items, "failed": failed_items}
+
+    def run(self, args):
+        if self.verified is False:
+            self.error = True
+            self.results["failed"] = True
+            self.results["imported"] = ""
+        else:
+            report_dir = Path("/private/tmp/autopkg")
+            report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
+            report_name = Path(f"{self.name}-{report_time}.plist")
+
+            report_dir.mkdir(parents=True, exist_ok=True)
+            report = report_dir / report_name
+            report.touch(exist_ok=True)
+
+            try:
+                prefs_file = (
+                    ["--prefs", args.autopkg_prefs.as_posix()]
+                    if args.autopkg_prefs
+                    else []
+                )
+                verbose_output = ["-vvvv"] if args.debug else []
+                post_processor_cmd = (
+                    list(
+                        chain.from_iterable(
+                            [
+                                ("--post", processor)
+                                for processor in self.post_processors
+                            ]
+                        )
+                    )
+                    if self.post_processors
+                    else []
+                )
+                autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+                cmd = (
+                    [autopkg_bin, "run", self.filename, "--report-plist", report]
+                    + verbose_output
+                    + prefs_file
+                    + post_processor_cmd
+                )
+                logging.debug(f"cmd: {cmd}")
+
+                result = subprocess.run(cmd, capture_output=True, text=True)
+                if result.returncode == 0:
+                    report_info = self._parse_report(report)
+                    self.results = report_info
+                else:
+                    self.error = True
+                    self.results["failed"] = True
+                    self.results["message"] = (result.stderr or "").strip()
+                    self.results["imported"] = ""
+            except Exception as e:  # pylint: disable=broad-exception-caught
+                logging.error(f"Recipe run failed: {e}")
+                self.error = True
+                self.results["failed"] = True
+                self.results["message"] = (result.stderr or "").strip()
+                self.results["imported"] = ""
+
+        return self
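The new Recipe model wraps the autopkg CLI: verify_trust_info, update_trust_info, and run each shell out to the autopkg binary and stash the outcome on the instance. A rough usage sketch based on the class above; the argparse.Namespace fields shown are assumptions inferred from the attributes the class reads (args.debug, args.autopkg_prefs), and a real run would get them from setup_args():

import argparse
from pathlib import Path

from autopkg_wrapper.models.recipe import Recipe

# Assumed namespace fields; not the package's actual argument parser.
args = argparse.Namespace(
    debug=False,
    autopkg_prefs=Path.home() / "Library/Preferences/com.github.autopkg.plist",
)

recipe = Recipe(
    "Firefox.munki",
    post_processors=["io.github.hjuutilainen.VirusTotalAnalyzer/VirusTotalAnalyzer"],
)
if recipe.verify_trust_info(args):      # shells out to `autopkg verify-trust-info`
    recipe.run(args)                    # shells out to `autopkg run --report-plist ...`
    print(recipe.results.get("imported"), recipe.results.get("failed"))
else:
    recipe.update_trust_info(args)      # shells out to `autopkg update-trust-info`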
autopkg_wrapper/notifier/__init__.py
CHANGED
@@ -1 +0,0 @@
-__version__ = "0.0.0"
autopkg_wrapper/notifier/slack.py
CHANGED
@@ -5,7 +5,7 @@ import requests
 
 
 def send_notification(recipe, token):
-    logging.debug("
+    logging.debug("Preparing Slack notification")
 
     if token is None:
         logging.error("Skipping Slack Notification as no SLACK_WEBHOOK_TOKEN defined!")
@@ -19,7 +19,7 @@ def send_notification(recipe, token):
     if not recipe.results["failed"]:
         task_description = "Unknown error"
     else:
-        task_description = ("Error: {} \
+        task_description = ("Error: {} \nTraceback: {} \n").format(
            recipe.results["failed"][0]["message"],
            recipe.results["failed"][0]["traceback"],
        )
autopkg_wrapper/utils/__init__.py
CHANGED
@@ -1 +0,0 @@
-__version__ = "0.0.0"