shrinkwrap_tool-2026.2.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. shrinkwrap/__init__.py +1 -0
  2. shrinkwrap/__main__.py +4 -0
  3. shrinkwrap/commands/__init__.py +0 -0
  4. shrinkwrap/commands/build.py +91 -0
  5. shrinkwrap/commands/buildall.py +180 -0
  6. shrinkwrap/commands/clean.py +161 -0
  7. shrinkwrap/commands/inspect.py +235 -0
  8. shrinkwrap/commands/process.py +106 -0
  9. shrinkwrap/commands/run.py +311 -0
  10. shrinkwrap/config/FVP_Base_RevC-2xAEMvA-base.yaml +98 -0
  11. shrinkwrap/config/FVP_Base_RevC-2xAEMvA-rme.yaml +42 -0
  12. shrinkwrap/config/arch/v8.0.yaml +22 -0
  13. shrinkwrap/config/arch/v8.1.yaml +26 -0
  14. shrinkwrap/config/arch/v8.2.yaml +28 -0
  15. shrinkwrap/config/arch/v8.3.yaml +25 -0
  16. shrinkwrap/config/arch/v8.4.yaml +26 -0
  17. shrinkwrap/config/arch/v8.5.yaml +29 -0
  18. shrinkwrap/config/arch/v8.6.yaml +28 -0
  19. shrinkwrap/config/arch/v8.7.yaml +24 -0
  20. shrinkwrap/config/arch/v8.8.yaml +31 -0
  21. shrinkwrap/config/arch/v8.9.yaml +32 -0
  22. shrinkwrap/config/arch/v9.0.yaml +29 -0
  23. shrinkwrap/config/arch/v9.1.yaml +25 -0
  24. shrinkwrap/config/arch/v9.2.yaml +29 -0
  25. shrinkwrap/config/arch/v9.3.yaml +23 -0
  26. shrinkwrap/config/arch/v9.4.yaml +21 -0
  27. shrinkwrap/config/arch/v9.5.yaml +20 -0
  28. shrinkwrap/config/bootwrapper.yaml +76 -0
  29. shrinkwrap/config/buildroot-cca.yaml +113 -0
  30. shrinkwrap/config/buildroot.yaml +54 -0
  31. shrinkwrap/config/cca-3world.yaml +215 -0
  32. shrinkwrap/config/cca-4world.yaml +57 -0
  33. shrinkwrap/config/cca-edk2.yaml +58 -0
  34. shrinkwrap/config/debug/rmm.yaml +15 -0
  35. shrinkwrap/config/debug/tfa.yaml +18 -0
  36. shrinkwrap/config/debug/tftf.yaml +17 -0
  37. shrinkwrap/config/dt-base.yaml +115 -0
  38. shrinkwrap/config/edk2-base.yaml +59 -0
  39. shrinkwrap/config/ffa-hafnium-optee.yaml +45 -0
  40. shrinkwrap/config/ffa-optee.yaml +30 -0
  41. shrinkwrap/config/ffa-tftf.yaml +26 -0
  42. shrinkwrap/config/hafnium-base.yaml +51 -0
  43. shrinkwrap/config/kvm-unit-tests.yaml +32 -0
  44. shrinkwrap/config/kvmtool-base.yaml +33 -0
  45. shrinkwrap/config/linux-base.yaml +80 -0
  46. shrinkwrap/config/ns-edk2-base.yaml +83 -0
  47. shrinkwrap/config/ns-edk2-optee.yaml +41 -0
  48. shrinkwrap/config/ns-edk2.yaml +49 -0
  49. shrinkwrap/config/ns-preload.yaml +98 -0
  50. shrinkwrap/config/optee-base.yaml +37 -0
  51. shrinkwrap/config/rfa-base.yaml +49 -0
  52. shrinkwrap/config/rfa.yaml +47 -0
  53. shrinkwrap/config/rmm-base.yaml +24 -0
  54. shrinkwrap/config/rust.yaml +31 -0
  55. shrinkwrap/config/test/cca.yaml +47 -0
  56. shrinkwrap/config/tfa-base.yaml +45 -0
  57. shrinkwrap/config/tfa-rme.yaml +36 -0
  58. shrinkwrap/config/tftf-base.yaml +32 -0
  59. shrinkwrap/shrinkwrap_main.py +133 -0
  60. shrinkwrap/utils/__init__.py +0 -0
  61. shrinkwrap/utils/clivars.py +16 -0
  62. shrinkwrap/utils/config.py +1166 -0
  63. shrinkwrap/utils/graph.py +263 -0
  64. shrinkwrap/utils/label.py +153 -0
  65. shrinkwrap/utils/logger.py +160 -0
  66. shrinkwrap/utils/process.py +230 -0
  67. shrinkwrap/utils/runtime.py +192 -0
  68. shrinkwrap/utils/ssh_agent.py +98 -0
  69. shrinkwrap/utils/tty.py +46 -0
  70. shrinkwrap/utils/vars.py +14 -0
  71. shrinkwrap/utils/workspace.py +59 -0
  72. shrinkwrap_tool-2026.2.1.dist-info/METADATA +63 -0
  73. shrinkwrap_tool-2026.2.1.dist-info/RECORD +77 -0
  74. shrinkwrap_tool-2026.2.1.dist-info/WHEEL +5 -0
  75. shrinkwrap_tool-2026.2.1.dist-info/entry_points.txt +2 -0
  76. shrinkwrap_tool-2026.2.1.dist-info/licenses/license.rst +41 -0
  77. shrinkwrap_tool-2026.2.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1166 @@
1
+ # Copyright (c) 2022, Arm Limited.
2
+ # SPDX-License-Identifier: MIT
3
+
4
+ import graphlib
5
+ import io
6
+ import json
7
+ import os
8
+ import re
9
+ import textwrap
10
+ import yaml
11
+ from shrinkwrap import __version__
12
+ import shrinkwrap.utils.clivars as uclivars
13
+ import shrinkwrap.utils.workspace as workspace
14
+ from urllib.parse import urlparse
15
+
16
+ _default_image = 'docker.io/shrinkwraptool/base-slim'
17
+
18
+ def _get_image(configs, args):
19
+ """
20
+ Determine the image to use
21
+ """
22
+ if args.image is not None:
23
+ # An image was specified on the command line, just use it!
24
+ return args.image
25
+ else:
26
+ # No image forced on the command line, use one required by one of the configs,
27
+ # but make sure that if multiple configs require an image, they all agree.
28
+ image = None
29
+ for c in configs:
30
+ if c['image'] is not None:
31
+ if image is None:
32
+ image = c['image']
33
+ elif c['image'] == image:
34
+ pass
35
+ else:
36
+ raise Exception('Unsupported case of different images requested.')
37
+ # Use the image required by the configs if there is one, else the default.
38
+ return image if image else _default_image
39
+
40
+
41
+ def get_image(configs, args):
42
+ image = _get_image(configs, args)
43
+ parts = image.split(":")
44
+ if len(parts) not in [1, 2]:
45
+ raise Exception('Invalid image path.')
46
+ image = parts[0]
47
+ tag = parts[1] if len(parts) == 2 else __version__
48
+ return f"{image}:{tag}"
49
+
50
+
51
+ def _component_normalize(component, name):
52
+ """
53
+ Fills in any missing lists or dictionaries with empty ones.
54
+ """
55
+ component.setdefault('repo', {})
56
+
57
+ if len(component['repo']) > 0 and \
58
+ all(not isinstance(v, dict) for v in component['repo'].values()):
59
+ component['repo'] = {'.': component['repo']}
60
+
61
+ for repo in component['repo'].values():
62
+ repo.setdefault('remote', None)
63
+ repo.setdefault('revision', None)
64
+ repo.setdefault('project', None)
65
+
66
+ component.setdefault('sourcedir', None)
67
+ component.setdefault('builddir', None)
68
+ component.setdefault('toolchain', None)
69
+ component.setdefault('stderrfilt', None)
70
+ component.setdefault('prebuild', [])
71
+ component.setdefault('build', [])
72
+ component.setdefault('postbuild', [])
73
+ component.setdefault('params', {})
74
+ component.setdefault('sync', None)
75
+ if component['sync'] == False:
76
+ component['sync'] = 'false'
77
+ elif component['sync'] == True:
78
+ component['sync'] = 'true'
79
+
80
+ component['artifacts'] = { key:
81
+ { 'path': val, 'base': None, 'export': True } if isinstance(val, str) else
82
+ { 'path': val['path'], 'base': val.get('rename'),
83
+ 'export': val.get('export', True) } if isinstance(val, dict) else
84
+ val for key, val in component.get('artifacts', {}).items() }
85
+
86
+ return component
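A sketch of the two accepted artifact forms and their normalized shape, assuming the installed wheel; the component name and paths are made up:

import shrinkwrap.utils.config as config

component = {'artifacts': {
    'BL1': 'build/bl1.bin',                  # bare string form
    'FIP': {'path': 'build/fip.bin', 'rename': 'fip2.bin', 'export': False},
}}
config._component_normalize(component, 'tfa')
# component['artifacts'] is now:
#   {'BL1': {'path': 'build/bl1.bin', 'base': None, 'export': True},
#    'FIP': {'path': 'build/fip.bin', 'base': 'fip2.bin', 'export': False}}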
87
+
88
+
89
+ def _build_normalize(build):
90
+ """
91
+ Fills in any missing lists or dictionaries with empty ones.
92
+ """
93
+ if len(build) == 0:
94
+ build['__dummy'] = {}
95
+
96
+ for name, component in build.items():
97
+ _component_normalize(component, name)
98
+
99
+
100
+ def _buildex_normalize(buildex):
101
+ """
102
+ Fills in any missing lists or dictionaries with empty ones.
103
+ """
104
+ buildex.setdefault('btvars', {})
105
+
106
+
107
+ def _run_normalize(run):
108
+ """
109
+ Fills in any missing lists or dictionaries with empty ones.
110
+ """
111
+ run.setdefault('name', None)
112
+ run.setdefault('rtvars', {})
113
+ run.setdefault('params', {})
114
+ run.setdefault('prerun', [])
115
+ run.setdefault('run', [])
116
+ run.setdefault('terminals', {})
117
+
118
+
119
+ def _config_normalize(config):
120
+ """
121
+ Fills in any missing lists or dictionaries with empty ones.
122
+ """
123
+ config.setdefault('name', None)
124
+ config.setdefault('fullname', None)
125
+ config.setdefault('description', None)
126
+ config.setdefault('image', None)
127
+ config.setdefault('concrete', False)
128
+ config.setdefault('layers', [])
129
+ config.setdefault('graph', {})
130
+ config.setdefault('build', {})
131
+ config.setdefault('buildex', {})
132
+
133
+ _build_normalize(config['build'])
134
+ _buildex_normalize(config['buildex'])
135
+
136
+ config.setdefault('artifacts', {})
137
+ config.setdefault('run', {})
138
+
139
+ _run_normalize(config['run'])
140
+
141
+ return config
142
+
143
+ def _component_validate(component):
144
+ sync = component.get('sync')
145
+ if sync not in (None, 'true', 'false', 'force'):
146
+ raise Exception(f'invalid "sync" value "{sync}"')
147
+
148
+ def _build_validate(build):
149
+ for component in build.values():
150
+ _component_validate(component)
151
+
152
+ def _config_validate(config):
153
+ """
154
+ Ensures the config conforms to the schema. Throws exception if any
155
+ issues are found.
156
+ """
157
+ # TODO:
158
+
159
+ if 'build' in config:
160
+ _build_validate(config['build'])
161
+
162
+
163
+ def _component_sort(component):
164
+ """
165
+ Sort the component so that the keys are in a canonical order. This
166
+ improves readability by humans.
167
+ """
168
+ lut = ['repo', 'sync', 'sourcedir', 'builddir', 'toolchain', 'stderrfilt',
169
+ 'params', 'prebuild', 'build', 'postbuild', 'artifacts']
170
+ lut = {k: i for i, k in enumerate(lut)}
171
+ return dict(sorted(component.items(), key=lambda x: lut[x[0]]))
172
+
173
+
174
+ def _build_sort(build):
175
+ """
176
+ Sort the build section so that the keys are in a canonical order. This
177
+ improves readability by humans.
178
+ """
179
+ for name in build:
180
+ build[name] = _component_sort(build[name])
181
+ return dict(sorted(build.items()))
182
+
183
+
184
+ def _run_sort(run):
185
+ """
186
+ Sort the run section so that the keys are in a canonical order. This
187
+ improves readability by humans.
188
+ """
189
+ lut = ['name', 'rtvars', 'params', 'prerun', 'run', 'terminals']
190
+ lut = {k: i for i, k in enumerate(lut)}
191
+ return dict(sorted(run.items(), key=lambda x: lut[x[0]]))
192
+
193
+
194
+ def _config_sort(config):
195
+ """
196
+ Sort the config so that the keys are in a canonical order. This improves
197
+ readability by humans.
198
+ """
199
+ config['build'] = _build_sort(config['build'])
200
+ config['run'] = _run_sort(config['run'])
201
+
202
+ lut = ['name', 'fullname', 'description', 'image', 'concrete', 'layers',
203
+ 'graph', 'build', 'buildex', 'artifacts', 'run']
204
+ lut = {k: i for i, k in enumerate(lut)}
205
+ return dict(sorted(config.items(), key=lambda x: lut[x[0]]))
206
+
207
+
208
+ def _config_merge(base, new):
209
+ """
210
+ Merges new config into the base config.
211
+ """
212
+ _config_validate(base)
213
+ _config_validate(new)
214
+
215
+ def _merge(base, new, level=0):
216
+ if new is None:
217
+ return base
218
+
219
+ if isinstance(base, list) and isinstance(new, list):
220
+ return base + new
221
+
222
+ elif isinstance(base, dict) and isinstance(new, dict):
223
+ d = {}
224
+ for k in list(set(list(base.keys()) + list(new.keys()))):
225
+ d[k] = _merge(base.get(k), new.get(k), level+1)
226
+ return d
227
+
228
+ elif isinstance(base, str) and isinstance(new, str):
229
+ return new
230
+
231
+ return new
232
+
233
+ config = _merge(base, new)
234
+
235
+ # We add a dummy component if there are no others. After merging, if
236
+ # there are other components, remove it.
237
+ if '__dummy' in config['build'] and len(config['build']) > 1:
238
+ del config['build']['__dummy']
239
+
240
+ return config
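A minimal sketch of the merge rules above (lists concatenate, dictionaries merge recursively, scalars take the new value), assuming the installed wheel; the component and params are hypothetical:

import shrinkwrap.utils.config as config

base = {'build': {'tfa': {'build': ['make'], 'params': {'DEBUG': '0'}}}}
new = {'build': {'tfa': {'build': ['make fip'], 'params': {'DEBUG': '1'}}}}

merged = config._config_merge(base, new)
assert merged['build']['tfa']['build'] == ['make', 'make fip']  # lists concatenate
assert merged['build']['tfa']['params'] == {'DEBUG': '1'}       # new scalar wins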
241
+
242
+
243
+ def _string_tokenize(string, escape=True):
244
+ """
245
+ Returns ordered list of tokens, where each token has a 'type' and
246
+ 'value'. If 'type' is 'literal', 'value' is the literal string. If
247
+ 'type' is 'macro', 'value' is a dict defining 'type' and 'name'.
248
+ """
249
+ regex = r'\$(?:' \
250
+ r'(?P<escape>\$)|' \
251
+ r'(?:\{' \
252
+ r'(?P<type>[_a-zA-Z][_a-zA-Z0-9]*):' \
253
+ r'(?P<name>[_a-zA-Z][_a-zA-Z0-9]*)?' \
254
+ r'\})|' \
255
+ r'(?P<invalid>)' \
256
+ r')'
257
+ pattern = re.compile(regex)
258
+ tokens = []
259
+ lit_start = 0
260
+
261
+ m = pattern.search(string)
262
+ while m:
263
+ lit_end = m.span()[0]
264
+
265
+ if lit_end > lit_start:
266
+ tokens.append({
267
+ 'type': 'literal',
268
+ 'value': string[lit_start:lit_end],
269
+ })
270
+
271
+ lit_start = m.span()[1]
272
+
273
+ if m['invalid'] is not None:
274
+ raise Exception(f"Macro at col {lit_end}" \
275
+ f" in '{string}' is invalid.")
276
+ if m['escape'] is not None:
277
+ assert(m['escape'] == '$')
278
+ tokens.append({
279
+ 'type': 'literal',
280
+ 'value': '$' if escape else '$$',
281
+ })
282
+ if m['type'] is not None:
283
+ tokens.append({
284
+ 'type': 'macro',
285
+ 'value': {
286
+ 'type': m['type'],
287
+ 'name': m['name'],
288
+ },
289
+ })
290
+
291
+ m = pattern.search(string, pos=lit_start)
292
+
293
+ tokens.append({
294
+ 'type': 'literal',
295
+ 'value': string[lit_start:],
296
+ })
297
+
298
+ return tokens
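A sketch of the token stream for a string that mixes a macro with an escaped dollar, assuming the installed wheel:

import shrinkwrap.utils.config as config

tokens = config._string_tokenize('run ${artifact:BL1} for $$5')
# [{'type': 'literal', 'value': 'run '},
#  {'type': 'macro',   'value': {'type': 'artifact', 'name': 'BL1'}},
#  {'type': 'literal', 'value': ' for '},
#  {'type': 'literal', 'value': '$'},      # '$$' unescaped since escape=True
#  {'type': 'literal', 'value': '5'}]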
299
+
300
+
301
+ def _string_substitute(string, lut, final=True):
302
+ """
303
+ Takes a string containing macros and returns a string with the macros
304
+ substituted for the values found in the lut. If final is False, any
305
+ macro that does not have a value in the lut will be left as a macro in
306
+ the returned string. If final is True, any macro that does not have a
307
+ value in the lut will cause an exception. Final also controls unescaping
308
+ on $. If False, $$ is left as is, otherwise they are replaced with $.
309
+ """
310
+ # Skip substitution if not a string or is empty
311
+ if not isinstance(string, str) or not string:
312
+ return string
313
+
314
+ calls = []
315
+ frags = []
316
+ frag = ''
317
+ tokens = _string_tokenize(string, final)
318
+
319
+ for t in tokens:
320
+ if t['type'] == 'literal':
321
+ frag += t['value']
322
+ elif t['type'] == 'macro':
323
+ m = t['value']
324
+ try:
325
+ lu = lut[m['type']][m['name']]
326
+ if callable(lu):
327
+ calls.append(lu)
328
+ frags.append(frag)
329
+ frag = ''
330
+ else:
331
+ frag += lu
332
+ except Exception:
333
+ macro = f"${{{m['type']}:{m['name']}}}"
334
+ frag += macro
335
+ else:
336
+ assert(False)
337
+
338
+ frags.append(frag)
339
+ assert(len(calls) + 1 == len(frags))
340
+
341
+ final = frags[0]
342
+
343
+ # Any callable macros expect to be called with anything that immediately
344
+ # follows them to the next whitespace, so do that now and assemble the
345
+ # final string.
346
+ for call, frag in zip(calls, frags[1:]):
347
+ final += call(frag.split(' ')[0])
348
+ final += frag
349
+
350
+ return final
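A usage sketch with a made-up lut; a callable entry (like the real ${param:configdir} lookup) returns a prefix for the whitespace-delimited word that follows the macro:

import shrinkwrap.utils.config as config

lut = {'param': {
    'jobs': '8',
    'configdir': lambda rel: '/configs',   # made-up stand-in for the real lookup
}}
s = config._string_substitute('make -j${param:jobs} -f ${param:configdir}/common.mk', lut)
assert s == 'make -j8 -f /configs/common.mk'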
351
+
352
+
353
+ def _string_has_macros(string):
354
+ tokens = _string_tokenize(string)
355
+ return any(t['type'] == 'macro' for t in tokens)
356
+
357
+
358
+ def _mk_params(params, separator):
359
+ pairs = [f'{k}' if v is None else f'{k}{separator}{v}'
360
+ for k, v in params.items()]
361
+ return ' '.join(sorted(pairs))
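A quick sketch with made-up FVP-style parameters; a None value emits the bare key, and the pairs come out sorted:

import shrinkwrap.utils.config as config

params = {'bp.secure_memory': '1', 'cache_state_modelled': None}
assert config._mk_params(params, '=') == 'bp.secure_memory=1 cache_state_modelled'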
362
+
363
+
364
+ def filename(name, rel=os.getcwd()):
365
+ """
366
+ Given a config name, finds the path to the config on disk. If the config
367
+ name exists relative to rel, we return that since it is a user config.
368
+ Else, if the config name exists relative to the config store then we
369
+ return that. If neither exist, then we return name unmodified, since
370
+ that will generate the most useful error.
371
+ """
372
+ fpath = os.path.abspath(os.path.join(rel, name))
373
+ cpath = workspace.config(name)
374
+
375
+ if os.path.exists(fpath):
376
+ return fpath
377
+ elif cpath:
378
+ return os.path.abspath(os.path.join(cpath, name))
379
+ else:
380
+ return name
381
+
382
+
383
+ def load(file_name, overlays=[], friendly=None):
384
+ """
385
+ Load a config from disk and return it as a dictionary. The config is
386
+ fully normalized, validated and merged. If file_name starts with '{' it
387
+ is treated as a json config instead of a file name and is parsed
388
+ directly. This allows passing json config snippets as overlays on the
389
+ command line.
390
+ """
391
+ def _config_load(file_name):
392
+ if file_name[0] == '{':
393
+ config = json.loads(file_name)
394
+ config_dir = os.getcwd()
395
+ else:
396
+ with open(file_name) as file:
397
+ config = yaml.safe_load(file)
398
+ config_dir = os.path.dirname(file_name)
399
+
400
+ config = _config_normalize(config)
401
+ _config_validate(config)
402
+
403
+ # Recursively load and merge the layers.
404
+ master = _config_normalize({})
405
+ for layer in config['layers']:
406
+ layer = _config_load(filename(layer, config_dir))
407
+ master = _config_merge(master, layer)
408
+
409
+ master = _config_merge(master, config)
410
+
411
+ return master
412
+
413
+ config = _config_load(file_name)
414
+
415
+ for overlay in overlays:
416
+ config = _config_merge(config, overlay)
417
+
418
+ # Now that the config is fully merged, we don't need the layers
419
+ # property. It's also useful to store the name.
420
+ del config['layers']
421
+ config['fullname'] = os.path.basename(file_name)
422
+ config['name'] = os.path.splitext(config['fullname'])[0]
423
+ if friendly:
424
+ config['fullname'] = friendly
425
+
426
+ return _config_sort(config)
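A usage sketch assuming the installed wheel; ns-preload.yaml is one of the configs shipped in the package's config store (see the file list above), and the printed contents depend on that config:

import shrinkwrap.utils.config as config

cfg = config.load(config.filename('ns-preload.yaml'))
print(cfg['name'], sorted(cfg['build'].keys()))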
427
+
428
+
429
+ def dumps(config):
430
+ return dump(config, None)
431
+
432
+
433
+ def dump(config, fileobj):
434
+ return yaml.safe_dump(config,
435
+ fileobj,
436
+ explicit_start=True,
437
+ sort_keys=False,
438
+ version=(1, 2))
439
+
440
+
441
+ def _string_extract_artifacts(artifacts, strings):
442
+ for s in strings:
443
+ for t in _string_tokenize(str(s)):
444
+ if t['type'] != 'macro':
445
+ continue
446
+ m = t['value']
447
+ if m['type'] != 'artifact':
448
+ continue
449
+ if m['name'] is None:
450
+ raise KeyError('name')
451
+ artifacts.add(m['name'])
452
+
453
+
454
+ def resolveb(config, btvars={}, clivars={}):
455
+ """
456
+ Resolves the build-time macros (params, artifacts, etc) and fixes up the
457
+ config. Based on the artifact dependencies, the component build graph is
458
+ determined and placed into the config along with the global artifact
459
+ map. Expects a config that was previously loaded with load().
460
+ btvars=None means it is OK for btvars whose default value is None to
461
+ remain unresolved; btvars being a dict means all btvars values must be
462
+ resolved.
463
+ """
464
+ def _resolve_build_graph(config):
465
+ def _exporters_update(exporters, name, component):
466
+ new = {a: name for a in component['artifacts'].keys()}
467
+ clash = set(exporters.keys()).intersection(new.keys())
468
+
469
+ if len(clash) > 0:
470
+ a = clash.pop()
471
+ raise Exception(f"Duplicate artifact '{a}' exported by '{exporters[a]}' and '{new[a]}'.")
472
+
473
+ exporters.update(new)
474
+
475
+ def _importers_update(importers, name, component):
476
+ artifacts = set()
477
+
478
+ def _find_artifacts(strings):
479
+ try:
480
+ return _string_extract_artifacts(artifacts, strings)
481
+ except KeyError as e:
482
+ if e.args[0] != 'name': raise
483
+ raise Exception(f"'{name}' uses unnamed 'artifact' macro. 'artifact' macros must be named.")
484
+
485
+ _find_artifacts(component['params'].values())
486
+ _find_artifacts(component['prebuild'])
487
+ _find_artifacts(component['build'])
488
+ _find_artifacts(component['postbuild'])
489
+ _find_artifacts(component['artifacts'].values())
490
+
491
+ importers[name] = sorted(list(artifacts))
492
+
493
+ artifacts_exp = {}
494
+ artifacts_imp = {}
495
+ for name, desc in config['build'].items():
496
+ _exporters_update(artifacts_exp, name, desc)
497
+ _importers_update(artifacts_imp, name, desc)
498
+
499
+ graph = {}
500
+ for depender, deps in artifacts_imp.items():
501
+ graph[depender] = []
502
+ for dep in deps:
503
+ if dep not in artifacts_exp:
504
+ raise Exception(f"Imported artifact '{dep}' not exported by any component.")
505
+ dependee = artifacts_exp[dep]
506
+ if depender != dependee:
507
+ graph[depender].append(dependee)
508
+
509
+ return graph
510
+
511
+ def _resolve_artifact_map(config):
512
+ def _combine(config):
513
+ return { 'artifact': { k: v['path']
514
+ for desc in config['build'].values()
515
+ for k, v in desc['artifacts'].items() } }
516
+
517
+ def _normalize_basename(path):
518
+ return os.path.basename(os.path.normpath(path))
519
+
520
+ def _combine_full(config):
521
+ artifact_map = {}
522
+ for name, desc in config['build'].items():
523
+ locs = {key: {
524
+ 'src': os.path.normpath(val['path']),
525
+ 'dst': os.path.join(config['name'], _normalize_basename(val['base'] or val['path']))
526
+ if val['export'] == True else None,
527
+ 'component': name,
528
+ } for key, val in desc['artifacts'].items()}
529
+ artifact_map.update(locs)
530
+ return artifact_map
531
+
532
+ # ${artifact:*} macros could refer to other ${artifact:*}
533
+ # macros, so iteratively substitute the maximum number of times,
534
+ # which would be once per entry in the pathological case.
535
+
536
+ artifact_lut = _combine(config)
537
+ artifact_nr = len(artifact_lut['artifact'])
538
+
539
+ while artifact_nr > 0:
540
+ artifact_nr -= 1
541
+
542
+ for desc in config['build'].values():
543
+ for v in desc['artifacts'].values():
544
+ v['path'] = _string_substitute(v['path'], artifact_lut, False)
545
+
546
+ if artifact_nr > 0:
547
+ artifact_lut = _combine(config)
548
+
549
+ return _combine_full(config)
550
+
551
+ def _substitute_macros(config, lut, final):
552
+ for desc in config['build'].values():
553
+ desc['sourcedir'] = _string_substitute(desc['sourcedir'], lut, final)
554
+ desc['builddir'] = _string_substitute(desc['builddir'], lut, final)
555
+
556
+ lut['param']['sourcedir'] = desc['sourcedir']
557
+ lut['param']['builddir'] = desc['builddir']
558
+
559
+ desc['params'] = { k: _string_substitute(v, lut, final) for k, v in desc['params'].items() }
560
+
561
+ lut['param']['join_equal'] = _mk_params(desc['params'], '=')
562
+ lut['param']['join_space'] = _mk_params(desc['params'], ' ')
563
+
564
+ for r in desc['repo'].values():
565
+ r['remote'] = _string_substitute(r['remote'], lut, final)
566
+ r['revision'] = _string_substitute(r['revision'], lut, final)
567
+
568
+ desc['toolchain'] = _string_substitute(desc['toolchain'], lut, final)
569
+
570
+ for k in ( 'prebuild', 'build', 'postbuild', ):
571
+ desc[k] = [ _string_substitute(s, lut, final) for s in desc[k] ]
572
+
573
+ for v in desc['artifacts'].values():
574
+ v['path'] = _string_substitute(v['path'], lut, False)
575
+ v['base'] = _string_substitute(v['base'], lut, False)
576
+
577
+ for v in config['buildex']['btvars'].values():
578
+ v['value'] = _string_substitute(v['value'], lut, final)
579
+
580
+ # Compute the source and build directories for each component. If they
581
+ # are already present, then don't override. This allows users to supply
582
+ # their own source and build tree locations.
583
+ for name, desc in config['build'].items():
584
+ comp_dir = os.path.join(config['name'], name)
585
+ if desc['sourcedir'] is None:
586
+ desc['sourcedir'] = os.path.join(workspace.build,
587
+ 'source',
588
+ comp_dir)
589
+ if desc['builddir'] is None:
590
+ desc['builddir'] = os.path.join(workspace.build,
591
+ 'build',
592
+ comp_dir)
593
+ if desc['sync'] is None:
594
+ desc['sync'] = 'true'
595
+
596
+ macro_lut = {
597
+ 'param': {
598
+ **uclivars.get(**clivars),
599
+ 'configdir': lambda x: workspace.config(x, False),
600
+ },
601
+ }
602
+
603
+ # Override the btvars with any values supplied by the user and check
604
+ # that all btvars are defined.
605
+ final_btvars = config['buildex']['btvars']
606
+
607
+ for k, v in final_btvars.items():
608
+ if btvars is not None:
609
+ if k in btvars:
610
+ v['value'] = btvars[k]
611
+ if v['value'] is None:
612
+ raise Exception(f'{k} build-time variable ' \
613
+ 'not set by user and no ' \
614
+ 'default available.')
615
+
616
+ if v['type'] == 'path' and \
617
+ v['value'] and \
618
+ not _string_has_macros(v['value']):
619
+ v['value'] = os.path.expanduser(v['value'])
620
+ v['value'] = os.path.abspath(v['value'])
621
+ # Ensure that string btvars with non-string values are converted to strings.
622
+ # Note that values with non-decimal base in the yaml will get converted to
623
+ # a decimal string. This should not be an issue for most uses, but might be
624
+ # unexpected in some edge cases.
625
+ elif v['type'] == 'string':
626
+ v['value'] = str(v['value'])
627
+
628
+ macro_lut['btvar'] = {k: v['value'] for k, v in final_btvars.items()}
629
+
630
+ # Do a first partial substitution, to resolve all macros except
631
+ # ${artifact:*}. These macros must remain in place in order to resolve
632
+ # the build graph. But its possible that btvars resolve to ${artifact:*}
633
+ # so we need to do the first pass prior to resolving the build graph.
634
+ # btvars are external to the component so they can't be used directly to
635
+ # build the graph.
636
+ _substitute_macros(config, macro_lut, False)
637
+
638
+ # Now resolve the build graph, which finds ${artifact:*} users.
639
+ graph = _resolve_build_graph(config)
640
+
641
+ # At this point we should only have ${artifact:*} macros remaining to
642
+ # resolve. But there may be some cases where ${artifact:*} macros resolve
643
+ # to other ${artifact:*} macros. So we need to iteratively resolve the
644
+ # artifact_map.
645
+ artifact_map = _resolve_artifact_map(config)
646
+ artifact_src_map = {k: v['src'] for k, v in artifact_map.items()}
647
+ macro_lut['artifact'] = artifact_src_map
648
+
649
+ # Final check to ensure everything is resolved and to fix escaped $.
650
+ _substitute_macros(config, macro_lut, True)
651
+
652
+ config['graph'] = graph
653
+ config['artifacts'] = artifact_map
654
+
655
+ return _config_sort(config)
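For orientation, a sketch of the shape of config['graph'] and how a build order falls out of it via graphlib; the component names are made up:

import graphlib

# Each component maps to the components whose artifacts it imports,
# e.g. 'tfa' importing an artifact exported by 'rmm'.
graph = {'rmm': [], 'tfa': ['rmm'], 'linux': []}
print(list(graphlib.TopologicalSorter(graph).static_order()))
# e.g. ['rmm', 'linux', 'tfa'] -- dependencies always precede importers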
656
+
657
+
658
+ def resolver(config, rtvars={}, clivars={}):
659
+ """
660
+ Resolves the run-time macros (artifacts, rtvars, etc) and fixes up the
661
+ config. Expects a config that was previously resolved with resolveb().
662
+ """
663
+ clivars = uclivars.get(**clivars)
664
+ run = config['run']
665
+
666
+ # Find the list of imported artifacts before any processing passes
667
+ artifacts_imp = set()
668
+ _string_extract_artifacts(artifacts_imp, run['params'].values())
669
+ _string_extract_artifacts(artifacts_imp, run['prerun'])
670
+ _string_extract_artifacts(artifacts_imp, run['run'])
671
+ _string_extract_artifacts(artifacts_imp, run['rtvars'].values())
672
+
673
+ # Override the rtvars with any values supplied by the user and check that
674
+ # all rtvars are defined.
675
+ for k, v in run['rtvars'].items():
676
+ if k in rtvars:
677
+ v['value'] = rtvars[k]
678
+ if v['value'] is None:
679
+ raise Exception(f'{k} run-time variable not ' \
680
+ 'set by user and no default available.')
681
+
682
+ # Update the artifacts so that the destination now points to an absolute
683
+ # path rather than one that is implicitly relative to SHRINKWRAP_PACKAGE.
684
+ # We can't do this at build-time because we don't know where the package
685
+ # will be located at run-time.
686
+ for k, v in config['artifacts'].items():
687
+ if v['dst'] is None:
688
+ if k not in artifacts_imp:
689
+ continue
690
+ raise Exception(f"Artifact '{k}' is required at run-time but not exported by any component")
691
+ v['dst'] = os.path.join(workspace.package, v['dst'])
692
+
693
+ # Create a lookup table with all the artifacts in their package
694
+ # locations, then do substitution to fully resolve the rtvars. An
695
+ # exception will be thrown if there are any macros that we don't have
696
+ # values for.
697
+ lut = {
698
+ 'param': {
699
+ 'packagedir': os.path.join(workspace.package, config['name']),
700
+ **dict(clivars),
701
+ },
702
+ 'artifact': {k: v['dst']
703
+ for k, v in config['artifacts'].items()},
704
+ 'btvar': {k: v['value']
705
+ for k, v in config['buildex']['btvars'].items()},
706
+ }
707
+ for v in run['rtvars'].values():
708
+ v['value'] = _string_substitute(str(v['value']), lut)
709
+ if v['type'] == 'path' and v['value']:
710
+ v['value'] = os.path.expanduser(v['value'])
711
+ v['value'] = os.path.abspath(v['value'])
712
+
713
+ # Now create a lookup table with all the rtvars and resolve all the
714
+ # parameters. An exception will be thrown if there are any macros that
715
+ # we don't have values for.
716
+ lut['rtvar'] = {k: v['value'] for k, v in run['rtvars'].items()}
717
+
718
+ run['params'] = { k: _string_substitute(v, lut) for k, v in run['params'].items() }
719
+
720
+ # Assemble the final runtime command and stuff it into the config.
721
+ params = _mk_params(run['params'], '=')
722
+
723
+ terms = []
724
+ for param, terminal in run['terminals'].items():
725
+ if terminal['type'] in ['stdout']:
726
+ terms.append(f'-C {param}.start_telnet=0')
727
+ terms.append(f'-C {param}.mode=raw')
728
+ if terminal['type'] in ['xterm']:
729
+ terms.append(f'-C {param}.start_telnet=1')
730
+ terms.append(f'-C {param}.mode=telnet')
731
+ if terminal['type'] in ['telnet', 'stdinout']:
732
+ terms.append(f'-C {param}.start_telnet=0')
733
+ terms.append(f'-C {param}.mode=telnet')
734
+ terms = ' '.join(terms)
735
+
736
+ if run["name"]:
737
+ run['run'] = [' '.join([run["name"], params, terms])]
738
+
739
+ for i, s in enumerate(run['prerun']):
740
+ run['prerun'][i] = _string_substitute(s, lut)
741
+
742
+ return _config_sort(config)
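The terminal 'type' handling above boils down to this mapping from type to FVP terminal parameters; a compact restatement of the branches, with a made-up constant name:

TERMINAL_PARAMS = {
    'stdout':   {'start_telnet': '0', 'mode': 'raw'},
    'xterm':    {'start_telnet': '1', 'mode': 'telnet'},
    'telnet':   {'start_telnet': '0', 'mode': 'telnet'},
    'stdinout': {'start_telnet': '0', 'mode': 'telnet'},
}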
743
+
744
+
745
+ def load_all(names, overlaynames=[]):
746
+ """
747
+ Takes a list of config names and returns a corresponding list of
748
+ loaded configs. If the input list is None or empty, all standard
749
+ configs are loaded.
750
+ """
751
+ explicit = names is not None and len(names) != 0
752
+ configs = []
753
+
754
+ if not explicit:
755
+ names = []
756
+ for p in workspace.configs():
757
+ for root, dirs, files in os.walk(p):
758
+ names += [os.path.relpath(
759
+ os.path.join(root, f), p)
760
+ for f in files]
761
+
762
+ overlays = []
763
+ for overlayname in overlaynames:
764
+ overlay = filename(overlayname)
765
+ overlay = load(overlay)
766
+ overlay = {
767
+ 'build': overlay['build'],
768
+ 'buildex': overlay['buildex'],
769
+ 'run': overlay['run'],
770
+ }
771
+ overlays.append(overlay)
772
+
773
+ for name in names:
774
+ try:
775
+ file = filename(name)
776
+ merged = load(file, overlays, name)
777
+ configs.append(merged)
778
+ except Exception:
779
+ if explicit:
780
+ raise
781
+
782
+ return configs
783
+
784
+
785
+ def load_resolveb_all(names, overlaynames=[], clivars={}, btvarss=None):
786
+ """
787
+ Takes a list of config names and returns a corresponding list of
788
+ resolved configs. If the input list is None or empty, all standard
789
+ configs are loaded and resolved.
790
+ """
791
+ configs_m = load_all(names, overlaynames)
792
+
793
+ if btvarss is None:
794
+ btvarss = [None] * len(configs_m)
795
+
796
+ assert(len(configs_m) == len(btvarss))
797
+
798
+ configs_r = []
799
+
800
+ for merged, btvars in zip(configs_m, btvarss):
801
+ resolved = resolveb(merged, btvars, clivars)
802
+ configs_r.append(resolved)
803
+
804
+ return configs_r
805
+
806
+
807
+ class Script:
808
+ def __init__(self,
809
+ summary,
810
+ config=None,
811
+ component=None,
812
+ preamble=None,
813
+ final=False,
814
+ stderrfilt=None):
815
+ self.summary = summary
816
+ self.config = config
817
+ self.component = component
818
+ self.final = final
819
+ self.stderrfilt = stderrfilt
820
+ self._cmds = ''
821
+ self._sealed = False
822
+ self._preamble = preamble
823
+
824
+ def append(self, *args, **kwargs):
825
+ assert(not self._sealed)
826
+
827
+ buf = io.StringIO()
828
+ print(*args, **kwargs, file=buf)
829
+
830
+ self._cmds += buf.getvalue()
831
+
832
+ def append_multiline(self, text, *args, indent='', **kwargs):
833
+ text = textwrap.indent(textwrap.dedent(text).strip(), indent)
834
+ self.append(text, *args, **kwargs)
835
+
836
+ def seal(self):
837
+ assert(not self._sealed)
838
+ self._sealed = True
839
+
840
+ def preamble(self):
841
+ return self._preamble
842
+
843
+ def commands(self, inc_preamble=True):
844
+ if inc_preamble:
845
+ return self._preamble + '\n' + self._cmds
846
+ else:
847
+ return self._cmds
848
+
849
+ def __eq__(self, other):
850
+ return self.summary == other.summary and \
851
+ self.config == other.config and \
852
+ self.component == other.component and \
853
+ self._cmds == other._cmds and \
854
+ self._sealed == other._sealed
855
+
856
+ def __hash__(self):
857
+ assert(self._sealed)
858
+ return hash((
859
+ self.summary,
860
+ self.config,
861
+ self.component,
862
+ self._cmds,
863
+ self._sealed
864
+ ))
865
+
866
+ def __repr__(self):
867
+ return f'{self.config}:{self.component} {self.summary}'
868
+
869
+
870
+ def script_preamble(echo):
871
+ pre = Script(None)
872
+ pre.append(f'#!/bin/bash')
873
+ pre.append(f'# SHRINKWRAP AUTOGENERATED SCRIPT.')
874
+ pre.append()
875
+ if echo:
876
+ pre.append(f'# Exit on error and echo commands.')
877
+ pre.append(f'set -ex')
878
+ else:
879
+ pre.append(f'# Exit on error.')
880
+ pre.append(f'set -e')
881
+ return pre.commands(False)
882
+
883
+
884
+ def script_build_preamble(echo):
885
+ pre = Script(None)
886
+ gitargs = '' if echo else '--quiet '
887
+ pre.append_multiline(f'''
888
+ # Function to update submodules without recursion
889
+ update_submodules() {{
890
+ local repo_path="$1"
891
+ local reference_path="$2"
892
+
893
+ local cwd=$(pwd)
894
+ cd $repo_path
895
+ # Check if .gitmodules file exists
896
+ local gitmodules_file=".gitmodules"
897
+ if [ -f "$gitmodules_file" ]; then
898
+ # Extract submodule paths from .gitmodules
899
+ git config --file "$gitmodules_file" --get-regexp path | while read -r path_key submodule_path; do
900
+ local git_submodule_reference=""
901
+ local submodule_reference="$reference_path/$submodule_path"
902
+
903
+ # Check if the submodule exists in the project cache
904
+ if [[ -n "$reference_path" && -e "$submodule_reference/.git" ]]; then
905
+ git_submodule_reference="--reference $submodule_reference"
906
+ fi
907
+
908
+ if [ -d "$submodule_path" ]; then
909
+ # Manually update nested submodules
910
+ git submodule {gitargs} update --init --checkout --force $git_submodule_reference $submodule_path
911
+ # Recursively process the submodule
912
+ update_submodules "$submodule_path" "$submodule_reference"
913
+ fi
914
+ done
915
+ fi
916
+ cd $cwd
917
+ }}''')
918
+ return pre.commands(False)
919
+
920
+
921
+ def build_graph(configs, echo, nosync, force_sync):
922
+ """
923
+ Returns a graph of scripts where the edges represent dependencies. The
924
+ scripts should be executed according to the graph in order to correctly
925
+ build all the configs.
926
+ """
927
+ graph = {}
928
+ gitargs = '' if echo else '--quiet '
929
+
930
+ pre = script_preamble(echo)
931
+ pre += script_build_preamble(echo)
932
+
933
+ gl1 = Script('Removing old package', preamble=pre)
934
+ gl1.append(f'# Remove old package.')
935
+ for config in configs:
936
+ gl1.append(f'rm -rf {workspace.package}/{config["name"]}.yaml > /dev/null 2>&1 || true')
937
+ gl1.append(f'rm -rf {workspace.package}/{config["name"]} > /dev/null 2>&1 || true')
938
+ gl1.seal()
939
+ graph[gl1] = []
940
+
941
+ gl2 = Script('Creating directory structure', preamble=pre)
942
+ gl2.append(f'# Create directory structure.')
943
+ for config in configs:
944
+ dirs = set()
945
+ for component in config['build'].values():
946
+ dir = component["sourcedir"]
947
+ if dir not in dirs:
948
+ gl2.append(f'mkdir -p {dir}')
949
+ dirs.add(dir)
950
+ dirs = set()
951
+ dir = os.path.join(workspace.package, config['name'])
952
+ gl2.append(f'mkdir -p {dir}')
953
+ dirs.add(dir)
954
+ for artifact in config['artifacts'].values():
955
+ if artifact['dst'] is None:
956
+ continue
957
+ dst = os.path.join(workspace.package, artifact['dst'])
958
+ dir = os.path.dirname(dst)
959
+ if dir not in dirs:
960
+ gl2.append(f'mkdir -p {dir}')
961
+ dirs.add(dir)
962
+ gl2.seal()
963
+ graph[gl2] = [gl1]
964
+
965
+ force_sync_all = not isinstance(force_sync, list)
966
+ sync_none = not isinstance(nosync, list)
967
+
968
+ for config in configs:
969
+ build_scripts = {}
970
+
971
+ # Make copies of nosync and force_sync that we can modify below
972
+ force_sync = set(config['build'].keys() if force_sync_all else force_sync)
973
+ nosync = set(config['build'].keys() if sync_none else nosync)
974
+
975
+ invalid_sync_cfg = force_sync.intersection(nosync)
976
+ if invalid_sync_cfg:
977
+ raise Exception(f'Conflicting sync configuration for {invalid_sync_cfg}')
978
+ for name, component in config['build'].items():
979
+ if component['sync'] == 'false' and name not in force_sync:
980
+ nosync.add(name)
981
+ elif component['sync'] == 'force' and name not in nosync:
982
+ force_sync.add(name)
983
+
984
+ ts = graphlib.TopologicalSorter(config['graph'])
985
+ ts.prepare()
986
+ while ts.is_active():
987
+ for name in ts.get_ready():
988
+ component = config['build'][name]
989
+
990
+ if name not in nosync and len(component['repo']) > 0:
991
+ g = Script('Syncing git repo', config["name"], name, preamble=pre)
992
+ g.append(f'# Sync git repo for config={config["name"]} component={name}.')
993
+ g.append(f'pushd {os.path.dirname(component["sourcedir"])}')
994
+
995
+ for gitlocal, repo in component['repo'].items():
996
+ parent = os.path.basename(component["sourcedir"])
997
+ gitlocal = os.path.normpath(os.path.join(parent, gitlocal))
998
+ gitremote = repo['remote']
999
+ gitrev = repo['revision']
1000
+ git_project_cache = repo['project']
1001
+ basedir = os.path.normpath(os.path.join(gitlocal, '..'))
1002
+ sync = os.path.join(basedir, f'.{os.path.basename(gitlocal)}_sync')
1003
+
1004
+ project_cache_dir = workspace.project_cache
1005
+ git_local_reference=" "
1006
+ if project_cache_dir:
1007
+ if not git_project_cache:
1008
+ parsed_url = urlparse(gitremote)
1009
+ git_project_cache = os.path.basename(parsed_url.path)
1010
+
1011
+ # Let's start by searching for a non-bare repo
1012
+ git_project_cache = os.path.join(project_cache_dir, git_project_cache.removesuffix('.git'))
1013
+ # Search for a non-bare repo
1014
+ if os.path.isdir(os.path.join(git_project_cache, '.git')):
1015
+ git_local_reference = f"--reference-if-able {os.path.join(git_project_cache, '.git')} "
1016
+ # Search for a bare repo
1017
+ elif os.path.isdir(f"{git_project_cache}.git"):
1018
+ git_local_reference=f"--reference-if-able {git_project_cache}.git "
1019
+ else:
1020
+ git_project_cache = ""
1021
+ else:
1022
+ git_project_cache = ""
1023
+
1024
+ if name in force_sync:
1025
+ # We don't update any submodule before `git submodule sync`,
1026
+ # to handle the case where a remote changes the submodule's URL.
1027
+ # `git checkout` handles most cases, but doesn't update a local
1028
+ # branch. So if gitrev is a branch, do a `git reset` as well.
1029
+ sync_cmd_when_exists = f'''
1030
+ git remote set-url origin {gitremote}
1031
+ git fetch {gitargs}--prune --prune-tags --force --recurse-submodules=off origin
1032
+ git checkout {gitargs}--force {gitrev}
1033
+ git show-ref -q --heads {gitrev} && git reset {gitargs}--hard origin/{gitrev}
1034
+ git submodule {gitargs}sync --recursive
1035
+ git submodule {gitargs}update --init --checkout --recursive --force
1036
+ '''.strip()
1037
+ else:
1038
+ sync_cmd_when_exists = f'''
1039
+ if ! git checkout {gitargs} {gitrev} > /dev/null 2>&1 &&
1040
+ ! ( git remote set-url origin {gitremote} &&
1041
+ git fetch {gitargs}--prune --tags origin &&
1042
+ git checkout {gitargs} {gitrev}) ||
1043
+ ! git submodule {gitargs}update --init --checkout --recursive
1044
+ then
1045
+ echo "note: use --force-sync={name} to override any change"
1046
+ exit 1
1047
+ fi
1048
+ '''.strip()
1049
+
1050
+ g.append_multiline(f'''
1051
+ if [ ! -e "{gitlocal}/.git" ] || [ -f "{sync}" ]; then
1052
+ rm -rf {gitlocal} > /dev/null 2>&1 || true
1053
+ mkdir -p {basedir}
1054
+ touch {sync}
1055
+ git clone {gitargs}{git_local_reference}{gitremote} {gitlocal}
1056
+ pushd {gitlocal}
1057
+ git checkout {gitargs}--force {gitrev}
1058
+ # Update submodules, passing the project cache as --reference when available
1059
+ update_submodules "$(pwd)" "{git_project_cache}"
1060
+ popd
1061
+ rm {sync}
1062
+ else
1063
+ pushd {gitlocal}
1064
+ {sync_cmd_when_exists}
1065
+ popd
1066
+ fi''')
1067
+ g.append(f'popd')
1068
+ g.seal()
1069
+ graph[g] = [gl2]
1070
+ else:
1071
+ g = gl2
1072
+
1073
+ b = Script('Building', config["name"], name, preamble=pre, stderrfilt=component['stderrfilt'])
1074
+ if len(component['prebuild']) + \
1075
+ len(component['build']) + \
1076
+ len(component['postbuild']) > 0:
1077
+ b.append(f'# Build for config={config["name"]} component={name}.')
1078
+ b.append(f'export CROSS_COMPILE={component["toolchain"] if component["toolchain"] else ""}')
1079
+ b.append(f'pushd {component["sourcedir"]}')
1080
+ for cmd in component['prebuild']:
1081
+ b.append(cmd)
1082
+ for cmd in component['build']:
1083
+ b.append(cmd)
1084
+ for cmd in component['postbuild']:
1085
+ b.append(cmd)
1086
+ b.append(f'popd')
1087
+ b.seal()
1088
+ graph[b] = [g] + [build_scripts[s] for s in config['graph'][name]]
1089
+
1090
+ build_scripts[name] = b
1091
+ ts.done(name)
1092
+
1093
+ a = Script('Copying artifacts', config["name"], name, preamble=pre, final=True)
1094
+ artifacts = {k: v for k, v in config['artifacts'].items() if v['component'] == name}
1095
+ if len(artifacts) > 0:
1096
+ a.append(f'# Copy artifacts for config={config["name"]} component={name}.')
1097
+ for artifact in artifacts.values():
1098
+ if artifact['dst'] is None:
1099
+ continue
1100
+ src = artifact['src']
1101
+ dst = os.path.join(workspace.package, artifact['dst'])
1102
+ # Disallow sparse copies to prevent possible file corruption when the host
1103
+ # volume is mounted into a docker container, particularly on macOS.
1104
+ a.append(f'cp -r --sparse=never {src} {dst}')
1105
+ a.seal()
1106
+ graph[a] = [b]
1107
+
1108
+ return graph
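A hedged sketch of consuming the returned graph: each key depends on the scripts it maps to, so any topological order is a valid execution order. shrinkwrap's own runner (see shrinkwrap.utils.process in the file list) may schedule scripts differently; this serial loop is illustrative only:

import graphlib
import subprocess

def run_serially(graph):
    # graph: {Script: [dependency Scripts]}; run each script's shell
    # payload only after all of its dependencies have completed.
    for script in graphlib.TopologicalSorter(graph).static_order():
        subprocess.run(['bash', '-c', script.commands()], check=True)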
1109
+
1110
+
1111
+ def clean_graph(configs, echo):
1112
+ """
1113
+ Returns a graph of scripts where the edges represent dependencies. The
1114
+ scripts should be executed according to the graph in order to correctly
1115
+ clean all the configs.
1116
+ """
1117
+ graph = {}
1118
+ gitargs = '' if echo else '--quiet '
1119
+
1120
+ pre = script_preamble(echo)
1121
+
1122
+ gl1 = Script('Removing old package', preamble=pre)
1123
+ gl1.append(f'# Remove old package.')
1124
+ for config in configs:
1125
+ gl1.append(f'rm -rf {workspace.package}/{config["name"]}.yaml > /dev/null 2>&1 || true')
1126
+ gl1.append(f'rm -rf {workspace.package}/{config["name"]} > /dev/null 2>&1 || true')
1127
+ gl1.seal()
1128
+ graph[gl1] = []
1129
+
1130
+ for config in configs:
1131
+ ts = graphlib.TopologicalSorter(config['graph'])
1132
+ ts.prepare()
1133
+ while ts.is_active():
1134
+ for name in ts.get_ready():
1135
+ component = config['build'][name]
1136
+
1137
+ c = Script('Cleaning', config["name"], name, preamble=pre, final=True)
1138
+ c.append(f'# Clean for config={config["name"]} component={name}.')
1139
+ c.append(f'rm -rf {component["builddir"]} > /dev/null 2>&1 || true')
1140
+ if len(component['repo']) > 0:
1141
+ c.append(f'if [ -d "{os.path.dirname(component["sourcedir"])}" ]; then')
1142
+ c.append(f'\tpushd {os.path.dirname(component["sourcedir"])}')
1143
+
1144
+ for gitlocal, repo in component['repo'].items():
1145
+ parent = os.path.basename(component["sourcedir"])
1146
+ gitlocal = os.path.normpath(os.path.join(parent, gitlocal))
1147
+ basedir = os.path.normpath(os.path.join(gitlocal, '..'))
1148
+ sync = os.path.join(basedir, f'.{os.path.basename(gitlocal)}_sync')
1149
+
1150
+ c.append_multiline(f'''
1151
+ if [ -d "{gitlocal}/.git" ] && [ ! -f "{sync}" ]; then
1152
+ pushd {gitlocal}
1153
+ git clean {gitargs}-xdff
1154
+ popd
1155
+ else
1156
+ rm -rf {gitlocal} {sync} > /dev/null 2>&1 || true
1157
+ fi''', indent='\t')
1158
+
1159
+ c.append(f'\tpopd')
1160
+ c.append(f'fi')
1161
+ c.seal()
1162
+ graph[c] = [gl1]
1163
+
1164
+ ts.done(name)
1165
+
1166
+ return graph