autopkg-wrapper 2025.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autopkg_wrapper/__init__.py +0 -0
- autopkg_wrapper/autopkg_wrapper.py +392 -0
- autopkg_wrapper/notifier/__init__.py +0 -0
- autopkg_wrapper/notifier/slack.py +61 -0
- autopkg_wrapper/utils/__init__.py +0 -0
- autopkg_wrapper/utils/args.py +147 -0
- autopkg_wrapper/utils/git_functions.py +160 -0
- autopkg_wrapper/utils/logging.py +12 -0
- autopkg_wrapper-2025.11.1.dist-info/METADATA +54 -0
- autopkg_wrapper-2025.11.1.dist-info/RECORD +13 -0
- autopkg_wrapper-2025.11.1.dist-info/WHEEL +4 -0
- autopkg_wrapper-2025.11.1.dist-info/entry_points.txt +2 -0
- autopkg_wrapper-2025.11.1.dist-info/licenses/LICENSE +31 -0
|
File without changes
|
|
@@ -0,0 +1,392 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
import json
|
|
3
|
+
import logging
|
|
4
|
+
import plistlib
|
|
5
|
+
import subprocess
|
|
6
|
+
import sys
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
from itertools import chain
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
import autopkg_wrapper.utils.git_functions as git
|
|
12
|
+
from autopkg_wrapper.notifier import slack
|
|
13
|
+
from autopkg_wrapper.utils.args import setup_args
|
|
14
|
+
from autopkg_wrapper.utils.logging import setup_logger
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class Recipe(object):
|
|
18
|
+
def __init__(self, name: str, post_processors: list = None):
|
|
19
|
+
self.filename = name
|
|
20
|
+
self.error = False
|
|
21
|
+
self.results = {}
|
|
22
|
+
self.updated = False
|
|
23
|
+
self.verified = None
|
|
24
|
+
self.pr_url = None
|
|
25
|
+
self.post_processors = post_processors
|
|
26
|
+
|
|
27
|
+
self._keys = None
|
|
28
|
+
self._has_run = False
|
|
29
|
+
|
|
30
|
+
@property
|
|
31
|
+
def name(self):
|
|
32
|
+
name = self.filename.split(".")[0]
|
|
33
|
+
|
|
34
|
+
return name
|
|
35
|
+
|
|
36
|
+
def verify_trust_info(self, args):
|
|
37
|
+
verbose_output = ["-vvvv"] if args.debug else None
|
|
38
|
+
prefs_file = (
|
|
39
|
+
["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else None
|
|
40
|
+
)
|
|
41
|
+
cmd = ["/usr/local/bin/autopkg", "verify-trust-info", self.filename]
|
|
42
|
+
cmd = cmd + verbose_output if verbose_output else cmd
|
|
43
|
+
cmd = cmd + prefs_file if prefs_file else cmd
|
|
44
|
+
cmd = " ".join(cmd)
|
|
45
|
+
logging.debug(f"cmd: {str(cmd)}")
|
|
46
|
+
|
|
47
|
+
p = subprocess.Popen(
|
|
48
|
+
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
|
|
49
|
+
)
|
|
50
|
+
(output, err) = p.communicate()
|
|
51
|
+
p_status = p.wait()
|
|
52
|
+
if p_status == 0:
|
|
53
|
+
self.verified = True
|
|
54
|
+
else:
|
|
55
|
+
err = err.decode()
|
|
56
|
+
self.results["message"] = err
|
|
57
|
+
self.verified = False
|
|
58
|
+
return self.verified
|
|
59
|
+
|
|
60
|
+
def update_trust_info(self, args):
|
|
61
|
+
prefs_file = (
|
|
62
|
+
["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else None
|
|
63
|
+
)
|
|
64
|
+
cmd = ["/usr/local/bin/autopkg", "update-trust-info", self.filename]
|
|
65
|
+
cmd = cmd + prefs_file if prefs_file else cmd
|
|
66
|
+
cmd = " ".join(cmd)
|
|
67
|
+
logging.debug(f"cmd: {str(cmd)}")
|
|
68
|
+
|
|
69
|
+
# Fail loudly if this exits 0
|
|
70
|
+
try:
|
|
71
|
+
subprocess.check_call(cmd, shell=True)
|
|
72
|
+
except subprocess.CalledProcessError as e:
|
|
73
|
+
logging.error(e.stderr)
|
|
74
|
+
raise e
|
|
75
|
+
|
|
76
|
+
def _parse_report(self, report):
|
|
77
|
+
with open(report, "rb") as f:
|
|
78
|
+
report_data = plistlib.load(f)
|
|
79
|
+
|
|
80
|
+
failed_items = report_data.get("failures", [])
|
|
81
|
+
imported_items = []
|
|
82
|
+
if report_data["summary_results"]:
|
|
83
|
+
# This means something happened
|
|
84
|
+
munki_results = report_data["summary_results"].get(
|
|
85
|
+
"munki_importer_summary_result", {}
|
|
86
|
+
)
|
|
87
|
+
imported_items.extend(munki_results.get("data_rows", []))
|
|
88
|
+
|
|
89
|
+
return {"imported": imported_items, "failed": failed_items}
|
|
90
|
+
|
|
91
|
+
def run(self, args):
|
|
92
|
+
if self.verified is False:
|
|
93
|
+
self.error = True
|
|
94
|
+
self.results["failed"] = True
|
|
95
|
+
self.results["imported"] = ""
|
|
96
|
+
else:
|
|
97
|
+
report_dir = Path("/tmp/autopkg")
|
|
98
|
+
report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
|
|
99
|
+
report_name = Path(f"{self.name}-{report_time}.plist")
|
|
100
|
+
|
|
101
|
+
report_dir.mkdir(parents=True, exist_ok=True)
|
|
102
|
+
report = report_dir / report_name
|
|
103
|
+
report.touch(exist_ok=True)
|
|
104
|
+
|
|
105
|
+
try:
|
|
106
|
+
prefs_file = (
|
|
107
|
+
["--prefs", args.autopkg_prefs.as_posix()]
|
|
108
|
+
if args.autopkg_prefs
|
|
109
|
+
else None
|
|
110
|
+
)
|
|
111
|
+
verbose_output = ["-vvvv"] if args.debug else None
|
|
112
|
+
post_processor_cmd = (
|
|
113
|
+
list(
|
|
114
|
+
chain.from_iterable(
|
|
115
|
+
[
|
|
116
|
+
("--post", processor)
|
|
117
|
+
for processor in self.post_processors
|
|
118
|
+
]
|
|
119
|
+
)
|
|
120
|
+
)
|
|
121
|
+
if self.post_processors
|
|
122
|
+
else None
|
|
123
|
+
)
|
|
124
|
+
cmd = [
|
|
125
|
+
"/usr/local/bin/autopkg",
|
|
126
|
+
"run",
|
|
127
|
+
self.filename,
|
|
128
|
+
"--report-plist",
|
|
129
|
+
str(report),
|
|
130
|
+
]
|
|
131
|
+
cmd = cmd + post_processor_cmd if post_processor_cmd else cmd
|
|
132
|
+
cmd = cmd + verbose_output if verbose_output else cmd
|
|
133
|
+
cmd = cmd + prefs_file if prefs_file else cmd
|
|
134
|
+
cmd = " ".join(cmd)
|
|
135
|
+
|
|
136
|
+
logging.debug(f"cmd: {str(cmd)}")
|
|
137
|
+
|
|
138
|
+
subprocess.check_call(cmd, shell=True)
|
|
139
|
+
|
|
140
|
+
except subprocess.CalledProcessError:
|
|
141
|
+
self.error = True
|
|
142
|
+
|
|
143
|
+
self._has_run = True
|
|
144
|
+
self.results = self._parse_report(report)
|
|
145
|
+
if not self.results["failed"] and not self.error:
|
|
146
|
+
self.updated = True
|
|
147
|
+
|
|
148
|
+
return self.results
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def get_override_repo_info(args):
    """Locate the git repository holding the autopkg recipe overrides.

    Uses args.overrides_repo_path when given; otherwise reads the autopkg
    preferences (JSON, plist, or the default user plist) to find
    RECIPE_OVERRIDE_DIRS and walks up to the enclosing git repo.

    Returns a dict of repo paths/URLs and git command-line fragments used by
    the git helper functions.
    """
    if args.overrides_repo_path:
        recipe_override_dirs = args.overrides_repo_path

    else:
        logging.debug("Trying to determine overrides dir from default paths")

        if args.autopkg_prefs:
            autopkg_prefs_path = Path(args.autopkg_prefs).resolve()

            if autopkg_prefs_path.suffix == ".json":
                with open(autopkg_prefs_path, "r") as f:
                    autopkg_prefs = json.load(f)
            elif autopkg_prefs_path.suffix == ".plist":
                autopkg_prefs = plistlib.loads(autopkg_prefs_path.read_bytes())
            # NOTE(review): a prefs file with any other suffix leaves
            # autopkg_prefs unbound and raises NameError below — confirm
            # whether other suffixes should be rejected earlier.
        else:
            user_home = Path.home()
            autopkg_prefs_path = (
                user_home / "Library/Preferences/com.github.autopkg.plist"
            )

            # NOTE(review): if the default plist does not exist, autopkg_prefs
            # stays unbound and the lookup below raises NameError.
            if autopkg_prefs_path.is_file():
                autopkg_prefs = plistlib.loads(
                    autopkg_prefs_path.resolve().read_bytes()
                )

        logging.debug(f"autopkg prefs path: {autopkg_prefs_path}")
        logging.debug(f"autopkg prefs: {autopkg_prefs}")
        # Assumes RECIPE_OVERRIDE_DIRS is a single path string — TODO confirm
        # (autopkg also allows a list of dirs).
        recipe_override_dirs = Path(autopkg_prefs["RECIPE_OVERRIDE_DIRS"]).resolve()

    # The overrides dir may itself be the repo root, or live one level inside it.
    if Path(recipe_override_dirs / ".git").is_dir():
        override_repo_path = recipe_override_dirs
    elif Path(recipe_override_dirs.parent / ".git").is_dir():
        override_repo_path = recipe_override_dirs.parent

    # NOTE(review): if neither location contains .git, override_repo_path is
    # unbound and the next line raises NameError.
    logging.debug(f"Override Repo Path: {override_repo_path}")

    # Pre-built --work-tree/--git-dir flags so git can run from any cwd.
    override_repo_git_work_tree = f"--work-tree={override_repo_path}"
    override_repo_git_git_dir = f"--git-dir={override_repo_path / '.git'}"
    override_repo_url, override_repo_remote_ref = git.get_repo_info(
        override_repo_git_git_dir
    )

    git_info = {
        "override_repo_path": override_repo_path,
        "override_repo_url": override_repo_url,
        "override_repo_remote_ref": override_repo_remote_ref,
        "__work_tree": override_repo_git_work_tree,
        "__git_dir": override_repo_git_git_dir,
        "override_trust_branch": args.branch_name,
        "github_token": args.github_token,
        "create_pr": args.create_pr,
    }

    logging.debug(git_info)
    return git_info
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):
    """Commit and push updated trust info when a recipe failed verification.

    No-op when the recipe verified successfully, when trust checking is
    disabled, or when --disable-git-commands is set.
    """
    logging.debug(f"recipe.verified: {recipe.verified}")
    logging.debug(f"disable_recipe_trust_check: {disable_recipe_trust_check}")

    # Case order is load-bearing: the guarded False|None case must be tested
    # before the bare False case so that disabling trust checks wins.
    match recipe.verified:
        case True:
            logging.debug("Not updating repo as recipe has been verified")
            return
        case False | None if disable_recipe_trust_check:
            logging.debug("Not updating repo as recipe verification has been disabled")
            return
        case False:
            logging.debug("Updating repo as recipe verification failed")
            current_branch = git.get_current_branch(git_info)

            if args.disable_git_commands:
                logging.info(
                    "Not runing git commands as --disable-git-commands has been set"
                )
                return

            # Only create the trust-update branch if we're not already on it
            # (e.g. a previous recipe in this run already created it).
            if current_branch != git_info["override_trust_branch"]:
                logging.debug(
                    f"override_trust_branch: {git_info['override_trust_branch']}"
                )
                git.create_branch(git_info)

            git.stage_recipe(git_info)
            git.commit_recipe(
                git_info, message=f"Updating Trust Info for {recipe.name}"
            )
            # Rebase on the remote branch before pushing to avoid rejected pushes.
            git.pull_branch(git_info)
            git.push_branch(git_info)

    return
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def parse_recipe_list(recipes, recipe_file, post_processors, args):
    """Parse the requested recipes into a list of Recipe objects.

    Sources (later wins; argparse makes --recipes/--recipe-file mutually
    exclusive so in practice only one is set):
      * recipe_file: a .json list or a .txt file with one recipe per line
      * recipes: a list (from argparse nargs) or a comma/space separated
        string (from the AW_RECIPES environment variable)

    Exits with status 1 when no recipes could be determined.
    """
    recipe_list = None

    if recipes:
        logging.info(f"Recipes: {recipes}")
    if recipe_file:
        logging.info(f"Recipe List: {recipe_file}")

    if recipe_file:
        if recipe_file.suffix == ".json":
            with open(recipe_file, "r") as f:
                recipe_list = json.load(f)
        elif recipe_file.suffix == ".txt":
            with open(recipe_file, "r") as f:
                recipe_list = f.read().splitlines()

    if recipes:
        if isinstance(recipes, list):
            recipe_list = recipes
        elif isinstance(recipes, str):
            # Env var form: prefer commas when present, otherwise spaces.
            # Strip before filtering so stray separators don't yield "" entries.
            separator = "," if "," in recipes else " "
            recipe_list = [
                recipe.strip() for recipe in recipes.split(separator) if recipe.strip()
            ]

    if recipe_list is None:
        logging.error(
            """Please provide recipes to run via the following methods:
    --recipes recipe_one.download recipe_two.download
    --recipe-file path/to/recipe_list.json
    Comma separated list in the AUTOPKG_RECIPES env variable"""
        )
        sys.exit(1)

    logging.info(f"Processing the following recipes: {recipe_list}")
    return [Recipe(name, post_processors=post_processors) for name in recipe_list]
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
def parse_post_processors(post_processors):
|
|
291
|
+
"""Parsing list of post_processors"""
|
|
292
|
+
logging.debug("Parsing post processors")
|
|
293
|
+
|
|
294
|
+
post_processors_list = None
|
|
295
|
+
|
|
296
|
+
match post_processors:
|
|
297
|
+
case None:
|
|
298
|
+
logging.debug("No post processors defined")
|
|
299
|
+
case []:
|
|
300
|
+
logging.debug("Found an empty list for post processors")
|
|
301
|
+
case list():
|
|
302
|
+
post_processors_list = post_processors
|
|
303
|
+
case str() if post_processors.find(",") != -1:
|
|
304
|
+
post_processors_list = [
|
|
305
|
+
post_processor.strip()
|
|
306
|
+
for post_processor in post_processors.split(",")
|
|
307
|
+
if post_processor.strip()
|
|
308
|
+
]
|
|
309
|
+
case str():
|
|
310
|
+
post_processors_list = [
|
|
311
|
+
post_processor.strip()
|
|
312
|
+
for post_processor in post_processors.split(" ")
|
|
313
|
+
if post_processor.strip()
|
|
314
|
+
]
|
|
315
|
+
|
|
316
|
+
logging.info(
|
|
317
|
+
f"Post Processors List: {post_processors_list}"
|
|
318
|
+
) if post_processors_list else None
|
|
319
|
+
|
|
320
|
+
return post_processors_list
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
def process_recipe(recipe, disable_recipe_trust_check, args):
    """Verify a recipe's trust info (unless disabled) and act on the result.

    Runs the recipe when verification passed or was skipped; updates trust
    info instead when verification failed. Returns the same recipe object.
    """
    if disable_recipe_trust_check:
        logging.debug("Setting Recipe verification to None")
        recipe.verified = None
    else:
        logging.debug("Checking Recipe verification")
        recipe.verify_trust_info(args)

    skipped = recipe.verified is None or recipe.verified is False
    if disable_recipe_trust_check and skipped:
        logging.debug("Running Recipe without verification")
        recipe.run(args)
    elif recipe.verified is True:
        logging.debug("Running Recipe after successful verification")
        recipe.run(args)
    elif recipe.verified is False:
        recipe.update_trust_info(args)

    return recipe
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
def main():
    """Entry point: run every requested recipe and handle trust-info fallout.

    For each recipe: verify/run it, push trust updates when verification
    failed, optionally notify Slack, and optionally open a PR. After the
    loop, optionally open a GitHub issue summarizing failed recipes.
    """
    args = setup_args()
    setup_logger(args.debug if args.debug else False)
    logging.info("Running autopkg_wrapper")

    override_repo_info = get_override_repo_info(args)

    post_processors_list = parse_post_processors(post_processors=args.post_processors)
    recipe_list = parse_recipe_list(
        recipes=args.recipes,
        recipe_file=args.recipe_file,
        post_processors=post_processors_list,
        args=args,
    )

    failed_recipes = []

    for recipe in recipe_list:
        logging.info(f"Processing Recipe: {recipe.name}")
        process_recipe(
            recipe=recipe,
            disable_recipe_trust_check=args.disable_recipe_trust_check,
            args=args,
        )
        update_recipe_repo(
            git_info=override_repo_info,
            recipe=recipe,
            disable_recipe_trust_check=args.disable_recipe_trust_check,
            args=args,
        )
        # Slack notification only when a webhook token was provided.
        slack.send_notification(
            recipe=recipe, token=args.slack_token
        ) if args.slack_token else None

        if recipe.error or recipe.results.get("failed"):
            failed_recipes.append(recipe)

        # NOTE(review): with --create-pr this runs for every recipe, even ones
        # that verified cleanly and pushed no branch — confirm intended.
        recipe.pr_url = (
            git.create_pull_request(git_info=override_repo_info, recipe=recipe)
            if args.create_pr
            else None
        )

    # Create GitHub issue for failed recipes
    if args.create_issues and failed_recipes and args.github_token:
        issue_url = git.create_issue_for_failed_recipes(
            git_info=override_repo_info, failed_recipes=failed_recipes
        )
        logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
|
|
File without changes
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
|
|
4
|
+
import requests
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def send_notification(recipe, token):
    """Post a Slack webhook message describing the recipe's outcome.

    Message content depends on recipe state: trust-verification failure,
    import error, or successful update. Silently returns when no token is
    configured, when there is nothing to report, or for the known
    "No releases found for repo" error.

    Raises ValueError when Slack responds with a non-200 status.
    """
    # NOTE(review): this logs unconditionally (visible whenever DEBUG logging
    # is on) but the function does NOT actually skip sending — the message
    # text looks stale; confirm intent.
    logging.debug("Skipping Slack notification as DEBUG is enabled!")

    if token is None:
        logging.error("Skipping Slack Notification as no SLACK_WEBHOOK_TOKEN defined!")
        return

    if recipe.verified is False:
        task_title = f"{recipe.name} failed trust verification"
        task_description = recipe.results["message"]
    elif recipe.error:
        task_title = f"Failed to import {recipe.name}"
        if not recipe.results["failed"]:
            task_description = "Unknown error"
        else:
            task_description = ("Error: {} \nTraceback: {} \n").format(
                recipe.results["failed"][0]["message"],
                recipe.results["failed"][0]["traceback"],
            )

            # Suppress noise for a known benign autopkg condition.
            if "No releases found for repo" in task_description:
                return
    elif recipe.updated:
        task_title = f"{recipe.name} has been uploaded to Jamf"
        task_description = f"It's time to test {recipe.name}!"
    else:
        # Nothing noteworthy happened; don't notify.
        return

    # `token` is expected to be the full Slack webhook URL — the POST target.
    response = requests.post(
        token,
        data=json.dumps(
            {
                "attachments": [
                    {
                        "username": "Autopkg",
                        "as_user": True,
                        "title": task_title,
                        # warning = trust failure, good = success, danger = error
                        "color": "warning"
                        if not recipe.verified
                        else "good"
                        if not recipe.error
                        else "danger",
                        "text": task_description,
                        "mrkdwn_in": ["text"],
                    }
                ]
            }
        ),
        headers={"Content-Type": "application/json"},
    )
    if response.status_code != 200:
        raise ValueError(
            "Request to slack returned an error %s, the response is:\n%s"
            % (response.status_code, response.text)
        )
|
|
File without changes
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import os
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def validate_file(arg):
    """Argparse type callable: resolve *arg* and require that it exists.

    Returns the resolved Path; raises argparse.ArgumentTypeError otherwise.
    """
    candidate = Path(arg).resolve()
    if not candidate.exists():
        raise argparse.ArgumentTypeError(f"Error! This is not valid file: {arg}")
    return candidate
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def validate_directory(arg):
    """Argparse type callable: resolve *arg* and require an existing directory.

    Returns the resolved Path; raises argparse.ArgumentTypeError otherwise.
    """
    candidate = Path(arg).resolve()
    if not candidate.is_dir():
        raise argparse.ArgumentTypeError(f"Error! This is not valid directory: {arg}")
    return candidate
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def validate_bool(arg):
    """Coerce an env-var style value to a bool.

    Booleans pass through; recognized truthy/falsy strings are converted
    (case-insensitive). Anything else falls through and returns None,
    matching the original implicit behavior.
    """
    if isinstance(arg, bool):
        return arg
    if isinstance(arg, str):
        lowered = arg.lower()
        if lowered in ("0", "false", "no", "f"):
            return False
        if lowered in ("1", "true", "yes", "t"):
            return True
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def find_github_token():
    """Return the first non-empty of GITHUB_TOKEN / GH_TOKEN, else None."""
    for env_var in ("GITHUB_TOKEN", "GH_TOKEN"):
        token = os.getenv(env_var)
        if token:
            return token
    return None
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def setup_args():
    """Build and parse the autopkg-wrapper command line.

    Most options can also be supplied through AW_* environment variables.
    Returns the parsed argparse.Namespace.
    """
    parser = argparse.ArgumentParser(description="Run autopkg recipes")
    recipe_arguments = parser.add_mutually_exclusive_group()
    recipe_arguments.add_argument(
        "--recipe-file",
        type=validate_file,
        default=os.getenv("AW_RECIPE_FILE", None),
        help="Provide the list of recipes to run via a JSON file for easier management.",
    )
    recipe_arguments.add_argument(
        "--recipes",
        nargs="*",
        default=os.getenv("AW_RECIPES", None),
        help="""
    Recipes to run via CLI flag or environment variable. If the '--recipes' flag is used, simply
    provide a space-separated list on the command line:
    `autopkg-wrapper --recipes recipe_one.download recipe_two.download`
    Alternatively, you can provide a space/comma-separated list in the 'AW_RECIPES' environment
    variable:
    `export AW_RECIPES="recipe_one.download recipe_two.download"`
    `export AW_RECIPES="recipe_one.pkg,recipe_two.pkg"`
    `autopkg-wrapper`
    """,
    )
    parser.add_argument(
        "--debug",
        default=validate_bool(os.getenv("AW_DEBUG", False)),
        action="store_true",
        help="Enable debug logging when running script",
    )
    parser.add_argument(
        "--disable-recipe-trust-check",
        action="store_true",
        help="""
    If this option is used, recipe trust verification will not be run prior to a recipe run.
    This does not set FAIL_RECIPES_WITHOUT_TRUST_INFO to No. You will need to set that outside
    of this application.
    """,
    )
    parser.add_argument(
        "--disable-git-commands",
        action="store_true",
        help="""
    If this option is used, git commands won't be run
    """,
    )
    parser.add_argument(
        "--slack-token",
        default=os.getenv("SLACK_WEBHOOK_TOKEN", None),
        help=argparse.SUPPRESS,
    )
    parser.add_argument("--github-token", default=find_github_token())
    parser.add_argument(
        "--branch-name",
        default=os.getenv(
            "AW_TRUST_BRANCH",
            f"fix/update_trust_information/{datetime.now().strftime('%Y-%m-%dT%H-%M-%S')}",
        ),
        help="""
    Branch name to be used recipe overrides have failed their trust verification and need to be updated.
    By default, this will be in the format of \"fix/update_trust_information/YYYY-MM-DDTHH-MM-SS\"
    """,
    )
    parser.add_argument(
        "--create-pr",
        # Coerce with validate_bool so AW_CREATE_PR="false"/"0" is not treated
        # as truthy (a raw env string here is always truthy), matching --debug.
        default=validate_bool(os.getenv("AW_CREATE_PR", False)),
        action="store_true",
        help="If enabled, autopkg_wrapper will open a PR for updated trust information",
    )
    parser.add_argument(
        "--create-issues",
        action="store_true",
        help="Create a GitHub issue for recipes that fail during processing",
    )
    parser.add_argument(
        "--overrides-repo-path",
        default=os.getenv("AW_OVERRIDES_REPO_PATH", None),
        type=validate_directory,
        help="""
    The path on disk to the git repository containing the autopkg overrides directory.
    If none is provided, we will try to determine it for you.
    """,
    )
    parser.add_argument(
        "--post-processors",
        default=os.getenv("AW_POST_PROCESSORS", None),
        nargs="*",
        help="""
    One or more autopkg post processors to run after each recipe execution
    """,
    )
    parser.add_argument(
        "--autopkg-prefs",
        default=os.getenv("AW_AUTOPKG_PREFS_FILE", None),
        type=validate_file,
        help="""
    Path to the autopkg preferences you'd like to use
    """,
    )

    return parser.parse_args()
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import subprocess
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
|
|
5
|
+
from github import Github
|
|
6
|
+
|
|
7
|
+
# git_info = {
|
|
8
|
+
# "override_repo_path": override_repo_path,
|
|
9
|
+
# "override_repo_url": override_repo_url,
|
|
10
|
+
# "override_repo_remote_ref": override_repo_remote_ref,
|
|
11
|
+
# "__work_tree": override_repo_git_work_tree,
|
|
12
|
+
# "__git_dir": override_repo_git_git_dir,
|
|
13
|
+
# "override_trust_branch": args.branch_name,
|
|
14
|
+
# "github_token": args.github_token,
|
|
15
|
+
# "create_pr": args.create_pr,
|
|
16
|
+
# }
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def git_run(*args):
    """Run `git` with *args*, capturing stdout/stderr as text.

    Returns the subprocess.CompletedProcess (exit status is not checked).
    """
    command = ["git", *args]
    return subprocess.run(command, text=True, capture_output=True)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_repo_info(override_repo_git_git_dir):
    """Return (repo_url, remote_ref) for the repo's `origin` remote.

    repo_url is the remote URL with any trailing ".git" removed; remote_ref
    is the "owner/name" portion after "https://github.com/".
    """
    remote_result = git_run(
        override_repo_git_git_dir, "config", "--get", "remote.origin.url"
    )
    repo_url = remote_result.stdout.strip().split(".git")[0]
    remote_repo_ref = repo_url.split("https://github.com/")[1]

    logging.debug(f"Repo URL: {repo_url}")
    logging.debug(f"Remote Repo Ref: {remote_repo_ref}")
    return repo_url, remote_repo_ref
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def get_current_branch(git_info):
    """Return the name of the branch currently checked out in the repo."""
    result = git_run(git_info["__git_dir"], "rev-parse", "--abbrev-ref", "HEAD")
    current_branch = result.stdout.strip()

    logging.debug(f"Current Branch: {current_branch}")
    return current_branch
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def create_branch(git_info):
    """Create and switch to the trust-update branch (`git checkout -b`)."""
    branch_name = git_info["override_trust_branch"]
    new_branch = git_run(git_info["__git_dir"], "checkout", "-b", branch_name)

    logging.debug(f"Git Branch: {new_branch}")
    return new_branch
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def stage_recipe(git_info):
    """Stage all modified tracked files in the overrides work tree (`git add -u`)."""
    add = git_run(
        git_info["__git_dir"],
        git_info["__work_tree"],
        "add",
        "-u",
    )

    logging.debug(f"Git Add: {add}")
    return add
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def commit_recipe(git_info, message):
    """Commit the staged trust-info changes using *message*."""
    commit = git_run(
        git_info["__git_dir"],
        git_info["__work_tree"],
        "commit",
        "-m",
        message,
    )

    logging.debug(f"Git Commit: {commit}")
    return commit
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def pull_branch(git_info):
    """Rebase the local trust branch onto its `origin` counterpart."""
    repo_flags = (git_info["__git_dir"], git_info["__work_tree"])
    pull = git_run(
        *repo_flags, "pull", "--rebase", "origin", git_info["override_trust_branch"]
    )

    logging.debug(f"Git Branch: {pull}")
    return pull
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def push_branch(git_info):
    """Push the trust branch to `origin` and set its upstream (`git push -u`)."""
    repo_flags = (git_info["__git_dir"], git_info["__work_tree"])
    push = git_run(
        *repo_flags, "push", "-u", "origin", git_info["override_trust_branch"]
    )

    logging.debug(f"Git Push: {push}")
    return push
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def create_pull_request(git_info, recipe):
    """Open a GitHub PR merging the trust-update branch into `main`.

    Args:
        git_info (dict): Repo info built by get_override_repo_info.
        recipe: Recipe whose trust info was updated.

    Returns:
        str: Web URL of the created pull request.
    """
    title = f"Update Trust Information: {recipe.name}"
    body = f"""
    Recipe Verification information is out-of-date for {recipe.name}.
    Please review and merge the updated trust information for this override.
    """

    g = Github(git_info["github_token"])
    repo = g.get_repo(git_info["override_repo_remote_ref"])
    # NOTE(review): base branch is hard-coded to "main" — repos whose default
    # branch differs (e.g. "master") will fail here; confirm acceptable.
    pr = repo.create_pull(
        title=title, body=body, head=git_info["override_trust_branch"], base="main"
    )
    pr_url = f"{git_info['override_repo_url']}/pull/{pr.number}"

    logging.debug(f"PR URL: {pr_url}")
    return pr_url
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def create_issue_for_failed_recipes(git_info, failed_recipes):
    """
    Creates a GitHub issue listing all recipes that failed during the run.

    Args:
        git_info (dict): Dictionary containing Git repository information
        failed_recipes (list): List of Recipe objects that failed during processing

    Returns:
        str: URL of the created GitHub issue, or None if no issue was created
    """

    if not failed_recipes:
        logging.debug("No failed recipes to report")
        return None

    g = Github(git_info["github_token"])
    repo = g.get_repo(git_info["override_repo_remote_ref"])

    # Create issue title and body
    current_date = datetime.now().strftime("%Y-%m-%d")
    title = f"AutoPkg Recipe Failures - {current_date}"

    # One markdown section per failed recipe, with any parsed failure messages.
    body = "## Recipe Failure Details:\n\n"
    for recipe in failed_recipes:
        body += f"#### {recipe.name}\n"

        if recipe.results.get("failed"):
            for failure in recipe.results.get("failed", []):
                body += f"- {failure.get('message', 'Unknown error')}\n"

        body += "\n"

    body += "\nThis issue was automatically generated by autopkg-wrapper."

    # Create the issue
    # NOTE(review): assumes the "autopkg-failure" label already exists in the
    # repo (or that the token may create it) — confirm against GitHub behavior.
    issue = repo.create_issue(
        title=title,
        body=body,
        labels=["autopkg-failure"],
    )

    issue_url = f"{git_info['override_repo_url']}/issues/{issue.number}"
    logging.debug(f"Issue URL: {issue_url}")

    return issue_url
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def setup_logger(debug=False):
    """Configure the root logger: DEBUG level when *debug* is truthy, else INFO."""
    logging.basicConfig(
        level=logging.DEBUG if debug else logging.INFO,
        format="%(filename)s - %(funcName)s - %(levelname)s: %(message)s",
        # NOTE(review): datefmt has no visible effect while the format string
        # contains no %(asctime)s field — confirm whether timestamps were intended.
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    logging.debug("Debug logging is now enabled")
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: autopkg-wrapper
|
|
3
|
+
Version: 2025.11.1
|
|
4
|
+
Summary: A package used to execute some autopkg functions, primarily within the context of a GitHub Actions runner.
|
|
5
|
+
Project-URL: Repository, https://github.com/smithjw/autopkg-wrapper
|
|
6
|
+
Author-email: James Smith <james@smithjw.me>
|
|
7
|
+
License-Expression: BSD-3-Clause
|
|
8
|
+
License-File: LICENSE
|
|
9
|
+
Requires-Python: ~=3.12.0
|
|
10
|
+
Requires-Dist: chardet
|
|
11
|
+
Requires-Dist: idna
|
|
12
|
+
Requires-Dist: pygithub
|
|
13
|
+
Requires-Dist: requests
|
|
14
|
+
Requires-Dist: ruamel-yaml
|
|
15
|
+
Requires-Dist: toml
|
|
16
|
+
Requires-Dist: urllib3
|
|
17
|
+
Description-Content-Type: text/markdown
|
|
18
|
+
|
|
19
|
+
# autopkg-wrapper
|
|
20
|
+
|
|
21
|
+
`autopkg_wrapper` is a small package that can be used to run [`autopkg`](https://github.com/autopkg/autopkg) within CI/CD environments such as GitHub Actions.
|
|
22
|
+
|
|
23
|
+
The easiest way to run it is by installing with pip.
|
|
24
|
+
|
|
25
|
+
```shell
|
|
26
|
+
pip install autopkg-wrapper
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
## Command Line Parameters
|
|
30
|
+
|
|
31
|
+
```shell
|
|
32
|
+
-h, --help Show this help message and exit
|
|
33
|
+
--recipe-file RECIPE_FILE Path to a list of recipes to run (cannot be run with --recipes)
|
|
34
|
+
--recipes [RECIPES ...] Recipes to run with autopkg (cannot be run with --recipe-file)
|
|
35
|
+
--debug Enable debug logging when running script
|
|
36
|
+
--disable-recipe-trust-check If set, recipe override trust verification will not be run prior to a recipe run
|
|
37
|
+
--github-token GITHUB_TOKEN A token used to publish a PR to your GitHub repo if overrides require their trust to be updated
|
|
38
|
+
--branch-name BRANCH_NAME Branch name to be used where recipe overrides have failed their trust verification and need to be updated.
|
|
39
|
+
By default, this will be in the format of "fix/update_trust_information/YYYY-MM-DDTHH-MM-SS"
|
|
40
|
+
--create-pr If enabled, autopkg_wrapper will open a PR for updated trust information
|
|
41
|
+
--autopkg-prefs AW_AUTOPKG_PREFS_FILE
|
|
42
|
+
Path to the autopkg preferences you'd like to use
|
|
43
|
+
--autopkg-overrides-repo-path AUTOPKG_OVERRIDES_REPO_PATH
|
|
44
|
+
The path on disk to the git repository containing the autopkg overrides directory. If none is provided, we will try to determine it for you.
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
## Example
|
|
48
|
+
|
|
49
|
+
An example folder structure and GitHub Actions Workflow is available within the [`actions-demo`](actions-demo)
|
|
50
|
+
|
|
51
|
+
## Credits
|
|
52
|
+
|
|
53
|
+
- [`autopkg_tools` from Facebook](https://github.com/facebook/IT-CPE/tree/main/legacy/autopkg_tools)
|
|
54
|
+
- [`autopkg_tools` from Facebook, modified by Gusto](https://github.com/Gusto/it-cpe-opensource/tree/main/autopkg)
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
autopkg_wrapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
autopkg_wrapper/autopkg_wrapper.py,sha256=dF8BGhk1IpP4w6lRtJqgpY-VK9vkoOiD0jidIUaSn9M,13457
|
|
3
|
+
autopkg_wrapper/notifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
|
+
autopkg_wrapper/notifier/slack.py,sha256=aPxQDGd5zPxSsu3mEqalNOF0ly0QnYog0ieHokd5-OY,1979
|
|
5
|
+
autopkg_wrapper/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
6
|
+
autopkg_wrapper/utils/args.py,sha256=6sghB9DWZmv7VLUR9uJA5WkhxsZ08Ri1qoUY5rxydjY,4883
|
|
7
|
+
autopkg_wrapper/utils/git_functions.py,sha256=Ojsq-wQsw7Gezq9pYDTtXF9SxrK9b9Cfap3mbJyVgdw,4456
|
|
8
|
+
autopkg_wrapper/utils/logging.py,sha256=3knpMViO_zAU8WM5bSImQaz5M01vMFk_raB4lt1cbvo,324
|
|
9
|
+
autopkg_wrapper-2025.11.1.dist-info/METADATA,sha256=qWyqCoaK3d4ttaNwIEFRrYeqQE1BKznnOP8twLpUJYc,2528
|
|
10
|
+
autopkg_wrapper-2025.11.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
11
|
+
autopkg_wrapper-2025.11.1.dist-info/entry_points.txt,sha256=TVIcOt7OozzX1c00pwMGbBysaHg_v_N3mO3juoFqPpo,73
|
|
12
|
+
autopkg_wrapper-2025.11.1.dist-info/licenses/LICENSE,sha256=PpNOQjZGcsKFuA0wU16YU7PueVxqPX4OnyZ7TlLQlq4,1602
|
|
13
|
+
autopkg_wrapper-2025.11.1.dist-info/RECORD,,
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
BSD-3-Clause
|
|
2
|
+
|
|
3
|
+
Copyright (c) Facebook, Inc. and its affiliates.
|
|
4
|
+
Copyright (c) tig <https://6fx.eu/>.
|
|
5
|
+
Copyright (c) Gusto, Inc.
|
|
6
|
+
Copyright (c) 2024, James Smith
|
|
7
|
+
|
|
8
|
+
Redistribution and use in source and binary forms, with or without
|
|
9
|
+
modification, are permitted provided that the following conditions are met:
|
|
10
|
+
|
|
11
|
+
1. Redistributions of source code must retain the above copyright notice, this
|
|
12
|
+
list of conditions and the following disclaimer.
|
|
13
|
+
|
|
14
|
+
2. Redistributions in binary form must reproduce the above copyright notice,
|
|
15
|
+
this list of conditions and the following disclaimer in the documentation
|
|
16
|
+
and/or other materials provided with the distribution.
|
|
17
|
+
|
|
18
|
+
3. Neither the name of the copyright holder nor the names of its
|
|
19
|
+
contributors may be used to endorse or promote products derived from
|
|
20
|
+
this software without specific prior written permission.
|
|
21
|
+
|
|
22
|
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
23
|
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
24
|
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
25
|
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
26
|
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
27
|
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
28
|
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
29
|
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
30
|
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
31
|
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|